commit_id
stringlengths 40
40
| project
stringclasses 11
values | commit_message
stringlengths 3
3.04k
| type
stringclasses 3
values | url
stringclasses 11
values | git_diff
stringlengths 555
691k
|
|---|---|---|---|---|---|
b27e240fdbf9ad91690ae596c21f511808377582
|
spring-framework
|
Consistent use of varargs, consistent template- method order--
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-oxm/src/main/java/org/springframework/oxm/castor/CastorMarshaller.java b/spring-oxm/src/main/java/org/springframework/oxm/castor/CastorMarshaller.java
index ab78c0bd2272..7e8ecfc42e76 100644
--- a/spring-oxm/src/main/java/org/springframework/oxm/castor/CastorMarshaller.java
+++ b/spring-oxm/src/main/java/org/springframework/oxm/castor/CastorMarshaller.java
@@ -97,7 +97,7 @@ public class CastorMarshaller extends AbstractMarshaller implements Initializing
private String encoding = DEFAULT_ENCODING;
- private Class[] targetClasses;
+ private Class<?>[] targetClasses;
private String[] targetPackages;
@@ -172,7 +172,7 @@ public void setMappingLocation(Resource mappingLocation) {
/**
* Set the locations of the Castor XML Mapping files.
*/
- public void setMappingLocations(Resource[] mappingLocations) {
+ public void setMappingLocations(Resource... mappingLocations) {
this.mappingLocations = mappingLocations;
}
@@ -180,15 +180,15 @@ public void setMappingLocations(Resource[] mappingLocations) {
* Set the Castor target class. Alternative means of configuring {@code CastorMarshaller} for unmarshalling
* multiple classes include use of mapping files, and specifying packages with Castor descriptor classes.
*/
- public void setTargetClass(Class targetClass) {
- this.targetClasses = new Class[]{targetClass};
+ public void setTargetClass(Class<?> targetClass) {
+ this.targetClasses = new Class<?>[] {targetClass};
}
/**
* Set the Castor target classes. Alternative means of configuring {@code CastorMarshaller} for unmarshalling
* multiple classes include use of mapping files, and specifying packages with Castor descriptor classes.
*/
- public void setTargetClasses(Class[] targetClasses) {
+ public void setTargetClasses(Class<?>... targetClasses) {
this.targetClasses = targetClasses;
}
@@ -202,7 +202,7 @@ public void setTargetPackage(String targetPackage) {
/**
* Set the names of packages with the Castor descriptor classes.
*/
- public void setTargetPackages(String[] targetPackages) {
+ public void setTargetPackages(String... targetPackages) {
this.targetPackages = targetPackages;
}
@@ -458,8 +458,8 @@ public void afterPropertiesSet() throws CastorMappingException, IOException {
* @see XMLContext#addMapping(org.exolab.castor.mapping.Mapping)
* @see XMLContext#addClass(Class)
*/
- protected XMLContext createXMLContext(Resource[] mappingLocations, Class[] targetClasses, String[] targetPackages)
- throws MappingException, ResolverException, IOException {
+ protected XMLContext createXMLContext(Resource[] mappingLocations, Class<?>[] targetClasses,
+ String[] targetPackages) throws MappingException, ResolverException, IOException {
XMLContext context = new XMLContext();
if (!ObjectUtils.isEmpty(mappingLocations)) {
@@ -492,47 +492,46 @@ public boolean supports(Class<?> clazz) {
return true;
}
+
// Marshalling
@Override
- protected final void marshalDomNode(Object graph, Node node) throws XmlMappingException {
+ protected void marshalDomNode(Object graph, Node node) throws XmlMappingException {
marshalSaxHandlers(graph, DomUtils.createContentHandler(node), null);
}
@Override
- protected final void marshalSaxHandlers(Object graph, ContentHandler contentHandler, LexicalHandler lexicalHandler)
- throws XmlMappingException {
-
- Marshaller marshaller = xmlContext.createMarshaller();
- marshaller.setContentHandler(contentHandler);
- marshal(graph, marshaller);
+ protected void marshalXmlEventWriter(Object graph, XMLEventWriter eventWriter) throws XmlMappingException {
+ marshalSaxHandlers(graph, StaxUtils.createContentHandler(eventWriter), null);
}
@Override
- protected final void marshalOutputStream(Object graph, OutputStream outputStream)
- throws XmlMappingException, IOException {
-
- marshalWriter(graph, new OutputStreamWriter(outputStream, encoding));
+ protected void marshalXmlStreamWriter(Object graph, XMLStreamWriter streamWriter) throws XmlMappingException {
+ marshalSaxHandlers(graph, StaxUtils.createContentHandler(streamWriter), null);
}
@Override
- protected final void marshalWriter(Object graph, Writer writer) throws XmlMappingException, IOException {
+ protected void marshalSaxHandlers(Object graph, ContentHandler contentHandler, LexicalHandler lexicalHandler)
+ throws XmlMappingException {
+
Marshaller marshaller = xmlContext.createMarshaller();
- marshaller.setWriter(writer);
- marshal(graph, marshaller);
+ marshaller.setContentHandler(contentHandler);
+ doMarshal(graph, marshaller);
}
@Override
- protected final void marshalXmlEventWriter(Object graph, XMLEventWriter eventWriter) throws XmlMappingException {
- marshalSaxHandlers(graph, StaxUtils.createContentHandler(eventWriter), null);
+ protected void marshalOutputStream(Object graph, OutputStream outputStream) throws XmlMappingException, IOException {
+ marshalWriter(graph, new OutputStreamWriter(outputStream, encoding));
}
@Override
- protected final void marshalXmlStreamWriter(Object graph, XMLStreamWriter streamWriter) throws XmlMappingException {
- marshalSaxHandlers(graph, StaxUtils.createContentHandler(streamWriter), null);
+ protected void marshalWriter(Object graph, Writer writer) throws XmlMappingException, IOException {
+ Marshaller marshaller = xmlContext.createMarshaller();
+ marshaller.setWriter(writer);
+ doMarshal(graph, marshaller);
}
- private void marshal(Object graph, Marshaller marshaller) {
+ private void doMarshal(Object graph, Marshaller marshaller) {
try {
customizeMarshaller(marshaller);
marshaller.marshal(graph);
@@ -572,10 +571,11 @@ protected void customizeMarshaller(Marshaller marshaller) {
}
}
+
// Unmarshalling
@Override
- protected final Object unmarshalDomNode(Node node) throws XmlMappingException {
+ protected Object unmarshalDomNode(Node node) throws XmlMappingException {
try {
return createUnmarshaller().unmarshal(node);
}
@@ -585,9 +585,9 @@ protected final Object unmarshalDomNode(Node node) throws XmlMappingException {
}
@Override
- protected final Object unmarshalInputStream(InputStream inputStream) throws XmlMappingException, IOException {
+ protected Object unmarshalXmlEventReader(XMLEventReader eventReader) {
try {
- return createUnmarshaller().unmarshal(new InputSource(inputStream));
+ return createUnmarshaller().unmarshal(eventReader);
}
catch (XMLException ex) {
throw convertCastorException(ex, false);
@@ -595,9 +595,9 @@ protected final Object unmarshalInputStream(InputStream inputStream) throws XmlM
}
@Override
- protected final Object unmarshalReader(Reader reader) throws XmlMappingException, IOException {
+ protected Object unmarshalXmlStreamReader(XMLStreamReader streamReader) {
try {
- return createUnmarshaller().unmarshal(new InputSource(reader));
+ return createUnmarshaller().unmarshal(streamReader);
}
catch (XMLException ex) {
throw convertCastorException(ex, false);
@@ -605,7 +605,7 @@ protected final Object unmarshalReader(Reader reader) throws XmlMappingException
}
@Override
- protected final Object unmarshalSaxReader(XMLReader xmlReader, InputSource inputSource)
+ protected Object unmarshalSaxReader(XMLReader xmlReader, InputSource inputSource)
throws XmlMappingException, IOException {
UnmarshalHandler unmarshalHandler = createUnmarshaller().createHandler();
@@ -621,9 +621,9 @@ protected final Object unmarshalSaxReader(XMLReader xmlReader, InputSource input
}
@Override
- protected final Object unmarshalXmlEventReader(XMLEventReader eventReader) {
+ protected Object unmarshalInputStream(InputStream inputStream) throws XmlMappingException, IOException {
try {
- return createUnmarshaller().unmarshal(eventReader);
+ return createUnmarshaller().unmarshal(new InputSource(inputStream));
}
catch (XMLException ex) {
throw convertCastorException(ex, false);
@@ -631,9 +631,9 @@ protected final Object unmarshalXmlEventReader(XMLEventReader eventReader) {
}
@Override
- protected final Object unmarshalXmlStreamReader(XMLStreamReader streamReader) {
+ protected Object unmarshalReader(Reader reader) throws XmlMappingException, IOException {
try {
- return createUnmarshaller().unmarshal(streamReader);
+ return createUnmarshaller().unmarshal(new InputSource(reader));
}
catch (XMLException ex) {
throw convertCastorException(ex, false);
@@ -679,6 +679,7 @@ protected void customizeUnmarshaller(Unmarshaller unmarshaller) {
}
}
+
/**
* Convert the given {@code XMLException} to an appropriate exception from the
* {@code org.springframework.oxm} hierarchy.
diff --git a/spring-oxm/src/main/java/org/springframework/oxm/jaxb/Jaxb2Marshaller.java b/spring-oxm/src/main/java/org/springframework/oxm/jaxb/Jaxb2Marshaller.java
index 9e513f435966..d182b0938f51 100644
--- a/spring-oxm/src/main/java/org/springframework/oxm/jaxb/Jaxb2Marshaller.java
+++ b/spring-oxm/src/main/java/org/springframework/oxm/jaxb/Jaxb2Marshaller.java
@@ -297,7 +297,7 @@ public void setValidationEventHandler(ValidationEventHandler validationEventHand
* Specify the {@code XmlAdapter}s to be registered with the JAXB {@code Marshaller}
* and {@code Unmarshaller}
*/
- public void setAdapters(XmlAdapter<?, ?>[] adapters) {
+ public void setAdapters(XmlAdapter<?, ?>... adapters) {
this.adapters = adapters;
}
@@ -311,7 +311,7 @@ public void setSchema(Resource schemaResource) {
/**
* Set the schema resources to use for validation.
*/
- public void setSchemas(Resource[] schemaResources) {
+ public void setSchemas(Resource... schemaResources) {
this.schemaResources = schemaResources;
}
diff --git a/spring-oxm/src/main/java/org/springframework/oxm/jibx/JibxMarshaller.java b/spring-oxm/src/main/java/org/springframework/oxm/jibx/JibxMarshaller.java
index b374b52f094b..494ac097f95d 100644
--- a/spring-oxm/src/main/java/org/springframework/oxm/jibx/JibxMarshaller.java
+++ b/spring-oxm/src/main/java/org/springframework/oxm/jibx/JibxMarshaller.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2012 the original author or authors.
+ * Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -84,6 +84,7 @@ public class JibxMarshaller extends AbstractMarshaller implements InitializingBe
private static final String DEFAULT_BINDING_NAME = "binding";
+
private Class<?> targetClass;
private String targetPackage;
@@ -112,7 +113,6 @@ public class JibxMarshaller extends AbstractMarshaller implements InitializingBe
/**
* Set the target class for this instance. Setting either this property or the
* {@link #setTargetPackage(String) targetPackage} property is required.
- *
* <p>If this property is set, {@link #setTargetPackage(String) targetPackage} is ignored.
*/
public void setTargetClass(Class<?> targetClass) {
@@ -122,7 +122,6 @@ public void setTargetClass(Class<?> targetClass) {
/**
* Set the target package for this instance. Setting either this property or the
* {@link #setTargetClass(Class) targetClass} property is required.
- *
* <p>If {@link #setTargetClass(Class) targetClass} is set, this property is ignored.
*/
public void setTargetPackage(String targetPackage) {
@@ -157,10 +156,9 @@ public void setStandalone(Boolean standalone) {
}
/**
- * Sets the root element name for the DTD declaration written when marshalling. By default, this is
- * {@code null} (i.e. no DTD declaration is written). If set to a value, the system ID or public ID also need to
- * be set.
- *
+ * Set the root element name for the DTD declaration written when marshalling.
+ * By default, this is {@code null} (i.e. no DTD declaration is written).
+ * <p>If set to a value, the system ID or public ID also need to be set.
* @see #setDocTypeSystemId(String)
* @see #setDocTypePublicId(String)
*/
@@ -169,10 +167,9 @@ public void setDocTypeRootElementName(String docTypeRootElementName) {
}
/**
- * Sets the system Id for the DTD declaration written when marshalling. By default, this is
- * {@code null}. Only used when the root element also has been set. Set either this property or
- * {@code docTypePublicId}, not both.
- *
+ * Set the system Id for the DTD declaration written when marshalling.
+ * By default, this is {@code null}. Only used when the root element also has been set.
+ * <p>Set either this property or {@code docTypePublicId}, not both.
* @see #setDocTypeRootElementName(String)
*/
public void setDocTypeSystemId(String docTypeSystemId) {
@@ -180,10 +177,9 @@ public void setDocTypeSystemId(String docTypeSystemId) {
}
/**
- * Sets the public Id for the DTD declaration written when marshalling. By default, this is
- * {@code null}. Only used when the root element also has been set. Set either this property or
- * {@code docTypeSystemId}, not both.
- *
+ * Set the public Id for the DTD declaration written when marshalling.
+ * By default, this is {@code null}. Only used when the root element also has been set.
+ * <p>Set either this property or {@code docTypeSystemId}, not both.
* @see #setDocTypeRootElementName(String)
*/
public void setDocTypePublicId(String docTypePublicId) {
@@ -191,15 +187,15 @@ public void setDocTypePublicId(String docTypePublicId) {
}
/**
- * Sets the internal subset Id for the DTD declaration written when marshalling. By default, this is
- * {@code null}. Only used when the root element also has been set.
- *
+ * Set the internal subset Id for the DTD declaration written when marshalling.
+ * By default, this is {@code null}. Only used when the root element also has been set.
* @see #setDocTypeRootElementName(String)
*/
public void setDocTypeInternalSubset(String docTypeInternalSubset) {
this.docTypeInternalSubset = docTypeInternalSubset;
}
+
@Override
public void afterPropertiesSet() throws JiBXException {
if (this.targetClass != null) {
@@ -215,7 +211,8 @@ public void afterPropertiesSet() throws JiBXException {
}
this.bindingFactory = BindingDirectory.getFactory(this.targetClass);
}
- } else if (this.targetPackage != null) {
+ }
+ else if (this.targetPackage != null) {
if (!StringUtils.hasLength(bindingName)) {
bindingName = DEFAULT_BINDING_NAME;
}
@@ -223,7 +220,8 @@ public void afterPropertiesSet() throws JiBXException {
logger.info("Configured for target package [" + targetPackage + "] using binding [" + bindingName + "]");
}
this.bindingFactory = BindingDirectory.getFactory(bindingName, targetPackage);
- } else {
+ }
+ else {
throw new IllegalArgumentException("either 'targetClass' or 'targetPackage' is required");
}
}
@@ -246,7 +244,7 @@ public boolean supports(Class<?> clazz) {
}
- // Supported Marshalling
+ // Supported marshalling
@Override
protected void marshalOutputStream(Object graph, OutputStream outputStream)
@@ -273,8 +271,7 @@ protected void marshalWriter(Object graph, Writer writer) throws XmlMappingExcep
}
}
- private void marshalDocument(IMarshallingContext marshallingContext, Object graph) throws IOException,
- JiBXException {
+ private void marshalDocument(IMarshallingContext marshallingContext, Object graph) throws IOException, JiBXException {
if (StringUtils.hasLength(docTypeRootElementName)) {
IXMLWriter xmlWriter = marshallingContext.getXmlWriter();
xmlWriter.writeDocType(docTypeRootElementName, docTypeSystemId, docTypePublicId, docTypeInternalSubset);
@@ -282,20 +279,8 @@ private void marshalDocument(IMarshallingContext marshallingContext, Object grap
marshallingContext.marshalDocument(graph);
}
- @Override
- protected void marshalXmlStreamWriter(Object graph, XMLStreamWriter streamWriter) throws XmlMappingException {
- try {
- MarshallingContext marshallingContext = (MarshallingContext) createMarshallingContext();
- IXMLWriter xmlWriter = new StAXWriter(marshallingContext.getNamespaces(), streamWriter);
- marshallingContext.setXmlWriter(xmlWriter);
- marshallingContext.marshalDocument(graph);
- }
- catch (JiBXException ex) {
- throw convertJibxException(ex, false);
- }
- }
- // Unsupported Marshalling
+ // Unsupported marshalling
@Override
protected void marshalDomNode(Object graph, Node node) throws XmlMappingException {
@@ -309,6 +294,25 @@ protected void marshalDomNode(Object graph, Node node) throws XmlMappingExceptio
}
}
+ @Override
+ protected void marshalXmlEventWriter(Object graph, XMLEventWriter eventWriter) {
+ XMLStreamWriter streamWriter = StaxUtils.createEventStreamWriter(eventWriter);
+ marshalXmlStreamWriter(graph, streamWriter);
+ }
+
+ @Override
+ protected void marshalXmlStreamWriter(Object graph, XMLStreamWriter streamWriter) throws XmlMappingException {
+ try {
+ MarshallingContext marshallingContext = (MarshallingContext) createMarshallingContext();
+ IXMLWriter xmlWriter = new StAXWriter(marshallingContext.getNamespaces(), streamWriter);
+ marshallingContext.setXmlWriter(xmlWriter);
+ marshallingContext.marshalDocument(graph);
+ }
+ catch (JiBXException ex) {
+ throw convertJibxException(ex, false);
+ }
+ }
+
@Override
protected void marshalSaxHandlers(Object graph, ContentHandler contentHandler, LexicalHandler lexicalHandler)
throws XmlMappingException {
@@ -338,31 +342,27 @@ private void transformAndMarshal(Object graph, Result result) throws IOException
}
- @Override
- protected void marshalXmlEventWriter(Object graph, XMLEventWriter eventWriter) {
- XMLStreamWriter streamWriter = StaxUtils.createEventStreamWriter(eventWriter);
- marshalXmlStreamWriter(graph, streamWriter);
- }
-
// Unmarshalling
@Override
- protected Object unmarshalInputStream(InputStream inputStream) throws XmlMappingException, IOException {
+ protected Object unmarshalXmlEventReader(XMLEventReader eventReader) {
try {
- IUnmarshallingContext unmarshallingContext = createUnmarshallingContext();
- return unmarshallingContext.unmarshalDocument(inputStream, encoding);
+ XMLStreamReader streamReader = StaxUtils.createEventStreamReader(eventReader);
+ return unmarshalXmlStreamReader(streamReader);
}
- catch (JiBXException ex) {
- throw convertJibxException(ex, false);
+ catch (XMLStreamException ex) {
+ return new UnmarshallingFailureException("JiBX unmarshalling exception", ex);
}
}
@Override
- protected Object unmarshalReader(Reader reader) throws XmlMappingException, IOException {
+ protected Object unmarshalXmlStreamReader(XMLStreamReader streamReader) {
try {
- IUnmarshallingContext unmarshallingContext = createUnmarshallingContext();
- return unmarshallingContext.unmarshalDocument(reader);
+ UnmarshallingContext unmarshallingContext = (UnmarshallingContext) createUnmarshallingContext();
+ IXMLReader xmlReader = new StAXReaderWrapper(streamReader, null, true);
+ unmarshallingContext.setDocument(xmlReader);
+ return unmarshallingContext.unmarshalElement();
}
catch (JiBXException ex) {
throw convertJibxException(ex, false);
@@ -370,12 +370,10 @@ protected Object unmarshalReader(Reader reader) throws XmlMappingException, IOEx
}
@Override
- protected Object unmarshalXmlStreamReader(XMLStreamReader streamReader) {
+ protected Object unmarshalInputStream(InputStream inputStream) throws XmlMappingException, IOException {
try {
- UnmarshallingContext unmarshallingContext = (UnmarshallingContext) createUnmarshallingContext();
- IXMLReader xmlReader = new StAXReaderWrapper(streamReader, null, true);
- unmarshallingContext.setDocument(xmlReader);
- return unmarshallingContext.unmarshalElement();
+ IUnmarshallingContext unmarshallingContext = createUnmarshallingContext();
+ return unmarshallingContext.unmarshalDocument(inputStream, encoding);
}
catch (JiBXException ex) {
throw convertJibxException(ex, false);
@@ -383,13 +381,13 @@ protected Object unmarshalXmlStreamReader(XMLStreamReader streamReader) {
}
@Override
- protected Object unmarshalXmlEventReader(XMLEventReader eventReader) {
+ protected Object unmarshalReader(Reader reader) throws XmlMappingException, IOException {
try {
- XMLStreamReader streamReader = StaxUtils.createEventStreamReader(eventReader);
- return unmarshalXmlStreamReader(streamReader);
+ IUnmarshallingContext unmarshallingContext = createUnmarshallingContext();
+ return unmarshallingContext.unmarshalDocument(reader);
}
- catch (XMLStreamException ex) {
- return new UnmarshallingFailureException("JiBX unmarshalling exception", ex);
+ catch (JiBXException ex) {
+ throw convertJibxException(ex, false);
}
}
diff --git a/spring-oxm/src/main/java/org/springframework/oxm/support/AbstractMarshaller.java b/spring-oxm/src/main/java/org/springframework/oxm/support/AbstractMarshaller.java
index d42cb72a49f6..a11877513798 100644
--- a/spring-oxm/src/main/java/org/springframework/oxm/support/AbstractMarshaller.java
+++ b/spring-oxm/src/main/java/org/springframework/oxm/support/AbstractMarshaller.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2012 the original author or authors.
+ * Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -170,7 +170,7 @@ protected DocumentBuilderFactory createDocumentBuilderFactory() throws ParserCon
}
/**
- * Create a {@code XMLReader} that this marshaller will when passed an empty {@code SAXSource}.
+ * Create an {@code XMLReader} that this marshaller will when passed an empty {@code SAXSource}.
* @return the XMLReader
* @throws SAXException if thrown by JAXP methods
*/
@@ -184,7 +184,7 @@ protected XMLReader createXmlReader() throws SAXException {
/**
* Template method for handling {@code DOMResult}s.
* <p>This implementation delegates to {@code marshalDomNode}.
- * @param graph the root of the object graph to marshal
+ * @param graph the root of the object graph to marshal
* @param domResult the {@code DOMResult}
* @throws XmlMappingException if the given object cannot be marshalled to the result
* @throws IllegalArgumentException if the {@code domResult} is empty
@@ -214,8 +214,8 @@ protected void marshalDomResult(Object graph, DOMResult domResult) throws XmlMap
* <p>This implementation delegates to {@code marshalXMLSteamWriter} or
* {@code marshalXMLEventConsumer}, depending on what is contained in the
* {@code StaxResult}.
- * @param graph the root of the object graph to marshal
- * @param staxResult a Spring {@link org.springframework.util.xml.StaxSource} or JAXP 1.4 {@link StAXSource}
+ * @param graph the root of the object graph to marshal
+ * @param staxResult a JAXP 1.4 {@link StAXSource}
* @throws XmlMappingException if the given object cannot be marshalled to the result
* @throws IllegalArgumentException if the {@code domResult} is empty
* @see #marshalDomNode(Object, org.w3c.dom.Node)
@@ -239,7 +239,7 @@ protected void marshalStaxResult(Object graph, Result staxResult) throws XmlMapp
/**
* Template method for handling {@code SAXResult}s.
* <p>This implementation delegates to {@code marshalSaxHandlers}.
- * @param graph the root of the object graph to marshal
+ * @param graph the root of the object graph to marshal
* @param saxResult the {@code SAXResult}
* @throws XmlMappingException if the given object cannot be marshalled to the result
* @see #marshalSaxHandlers(Object, org.xml.sax.ContentHandler, org.xml.sax.ext.LexicalHandler)
@@ -396,7 +396,7 @@ protected abstract void marshalDomNode(Object graph, Node node)
/**
* Abstract template method for marshalling the given object to a StAX {@code XMLEventWriter}.
- * @param graph the root of the object graph to marshal
+ * @param graph the root of the object graph to marshal
* @param eventWriter the {@code XMLEventWriter} to write to
* @throws XmlMappingException if the given object cannot be marshalled to the DOM node
*/
@@ -412,16 +412,6 @@ protected abstract void marshalXmlEventWriter(Object graph, XMLEventWriter event
protected abstract void marshalXmlStreamWriter(Object graph, XMLStreamWriter streamWriter)
throws XmlMappingException;
- /**
- * Abstract template method for marshalling the given object graph to a {@code OutputStream}.
- * @param graph the root of the object graph to marshal
- * @param outputStream the {@code OutputStream} to write to
- * @throws XmlMappingException if the given object cannot be marshalled to the writer
- * @throws IOException if an I/O exception occurs
- */
- protected abstract void marshalOutputStream(Object graph, OutputStream outputStream)
- throws XmlMappingException, IOException;
-
/**
* Abstract template method for marshalling the given object graph to a SAX {@code ContentHandler}.
* @param graph the root of the object graph to marshal
@@ -433,6 +423,16 @@ protected abstract void marshalSaxHandlers(
Object graph, ContentHandler contentHandler, LexicalHandler lexicalHandler)
throws XmlMappingException;
+ /**
+ * Abstract template method for marshalling the given object graph to a {@code OutputStream}.
+ * @param graph the root of the object graph to marshal
+ * @param outputStream the {@code OutputStream} to write to
+ * @throws XmlMappingException if the given object cannot be marshalled to the writer
+ * @throws IOException if an I/O exception occurs
+ */
+ protected abstract void marshalOutputStream(Object graph, OutputStream outputStream)
+ throws XmlMappingException, IOException;
+
/**
* Abstract template method for marshalling the given object graph to a {@code Writer}.
* @param graph the root of the object graph to marshal
@@ -443,6 +443,7 @@ protected abstract void marshalSaxHandlers(
protected abstract void marshalWriter(Object graph, Writer writer)
throws XmlMappingException, IOException;
+
/**
* Abstract template method for unmarshalling from a given DOM {@code Node}.
* @param node the DOM node that contains the objects to be unmarshalled
@@ -469,6 +470,18 @@ protected abstract Object unmarshalXmlEventReader(XMLEventReader eventReader)
protected abstract Object unmarshalXmlStreamReader(XMLStreamReader streamReader)
throws XmlMappingException;
+ /**
+ * Abstract template method for unmarshalling using a given SAX {@code XMLReader}
+ * and {@code InputSource}.
+ * @param xmlReader the SAX {@code XMLReader} to parse with
+ * @param inputSource the input source to parse from
+ * @return the object graph
+ * @throws XmlMappingException if the given reader and input source cannot be converted to an object
+ * @throws IOException if an I/O exception occurs
+ */
+ protected abstract Object unmarshalSaxReader(XMLReader xmlReader, InputSource inputSource)
+ throws XmlMappingException, IOException;
+
/**
* Abstract template method for unmarshalling from a given {@code InputStream}.
* @param inputStream the {@code InputStreamStream} to read from
@@ -489,16 +502,4 @@ protected abstract Object unmarshalInputStream(InputStream inputStream)
protected abstract Object unmarshalReader(Reader reader)
throws XmlMappingException, IOException;
- /**
- * Abstract template method for unmarshalling using a given SAX {@code XMLReader}
- * and {@code InputSource}.
- * @param xmlReader the SAX {@code XMLReader} to parse with
- * @param inputSource the input source to parse from
- * @return the object graph
- * @throws XmlMappingException if the given reader and input source cannot be converted to an object
- * @throws IOException if an I/O exception occurs
- */
- protected abstract Object unmarshalSaxReader(XMLReader xmlReader, InputSource inputSource)
- throws XmlMappingException, IOException;
-
}
diff --git a/spring-oxm/src/main/java/org/springframework/oxm/xmlbeans/XmlBeansMarshaller.java b/spring-oxm/src/main/java/org/springframework/oxm/xmlbeans/XmlBeansMarshaller.java
index 83a463936303..780f5efa83d7 100644
--- a/spring-oxm/src/main/java/org/springframework/oxm/xmlbeans/XmlBeansMarshaller.java
+++ b/spring-oxm/src/main/java/org/springframework/oxm/xmlbeans/XmlBeansMarshaller.java
@@ -122,8 +122,9 @@ public boolean supports(Class<?> clazz) {
return XmlObject.class.isAssignableFrom(clazz);
}
+
@Override
- protected final void marshalDomNode(Object graph, Node node) throws XmlMappingException {
+ protected void marshalDomNode(Object graph, Node node) throws XmlMappingException {
Document document = node.getNodeType() == Node.DOCUMENT_NODE ? (Document) node : node.getOwnerDocument();
Node xmlBeansNode = ((XmlObject) graph).newDomNode(getXmlOptions());
NodeList xmlBeansChildNodes = xmlBeansNode.getChildNodes();
@@ -135,14 +136,19 @@ protected final void marshalDomNode(Object graph, Node node) throws XmlMappingEx
}
@Override
- protected final void marshalOutputStream(Object graph, OutputStream outputStream)
- throws XmlMappingException, IOException {
+ protected void marshalXmlEventWriter(Object graph, XMLEventWriter eventWriter) {
+ ContentHandler contentHandler = StaxUtils.createContentHandler(eventWriter);
+ marshalSaxHandlers(graph, contentHandler, null);
+ }
- ((XmlObject) graph).save(outputStream, getXmlOptions());
+ @Override
+ protected void marshalXmlStreamWriter(Object graph, XMLStreamWriter streamWriter) throws XmlMappingException {
+ ContentHandler contentHandler = StaxUtils.createContentHandler(streamWriter);
+ marshalSaxHandlers(graph, contentHandler, null);
}
@Override
- protected final void marshalSaxHandlers(Object graph, ContentHandler contentHandler, LexicalHandler lexicalHandler)
+ protected void marshalSaxHandlers(Object graph, ContentHandler contentHandler, LexicalHandler lexicalHandler)
throws XmlMappingException {
try {
((XmlObject) graph).save(contentHandler, lexicalHandler, getXmlOptions());
@@ -153,24 +159,20 @@ protected final void marshalSaxHandlers(Object graph, ContentHandler contentHand
}
@Override
- protected final void marshalWriter(Object graph, Writer writer) throws XmlMappingException, IOException {
- ((XmlObject) graph).save(writer, getXmlOptions());
- }
+ protected void marshalOutputStream(Object graph, OutputStream outputStream)
+ throws XmlMappingException, IOException {
- @Override
- protected final void marshalXmlEventWriter(Object graph, XMLEventWriter eventWriter) {
- ContentHandler contentHandler = StaxUtils.createContentHandler(eventWriter);
- marshalSaxHandlers(graph, contentHandler, null);
+ ((XmlObject) graph).save(outputStream, getXmlOptions());
}
@Override
- protected final void marshalXmlStreamWriter(Object graph, XMLStreamWriter streamWriter) throws XmlMappingException {
- ContentHandler contentHandler = StaxUtils.createContentHandler(streamWriter);
- marshalSaxHandlers(graph, contentHandler, null);
+ protected void marshalWriter(Object graph, Writer writer) throws XmlMappingException, IOException {
+ ((XmlObject) graph).save(writer, getXmlOptions());
}
+
@Override
- protected final Object unmarshalDomNode(Node node) throws XmlMappingException {
+ protected Object unmarshalDomNode(Node node) throws XmlMappingException {
try {
XmlObject object = XmlObject.Factory.parse(node, getXmlOptions());
validate(object);
@@ -182,23 +184,20 @@ protected final Object unmarshalDomNode(Node node) throws XmlMappingException {
}
@Override
- protected final Object unmarshalInputStream(InputStream inputStream) throws XmlMappingException, IOException {
+ protected Object unmarshalXmlEventReader(XMLEventReader eventReader) throws XmlMappingException {
+ XMLReader reader = StaxUtils.createXMLReader(eventReader);
try {
- InputStream nonClosingInputStream = new NonClosingInputStream(inputStream);
- XmlObject object = XmlObject.Factory.parse(nonClosingInputStream, getXmlOptions());
- validate(object);
- return object;
+ return unmarshalSaxReader(reader, new InputSource());
}
- catch (XmlException ex) {
+ catch (IOException ex) {
throw convertXmlBeansException(ex, false);
}
}
@Override
- protected final Object unmarshalReader(Reader reader) throws XmlMappingException, IOException {
+ protected Object unmarshalXmlStreamReader(XMLStreamReader streamReader) throws XmlMappingException {
try {
- Reader nonClosingReader = new NonClosingReader(reader);
- XmlObject object = XmlObject.Factory.parse(nonClosingReader, getXmlOptions());
+ XmlObject object = XmlObject.Factory.parse(streamReader, getXmlOptions());
validate(object);
return object;
}
@@ -208,17 +207,18 @@ protected final Object unmarshalReader(Reader reader) throws XmlMappingException
}
@Override
- protected final Object unmarshalSaxReader(XMLReader xmlReader, InputSource inputSource)
+ protected Object unmarshalSaxReader(XMLReader xmlReader, InputSource inputSource)
throws XmlMappingException, IOException {
+
XmlSaxHandler saxHandler = XmlObject.Factory.newXmlSaxHandler(getXmlOptions());
xmlReader.setContentHandler(saxHandler.getContentHandler());
try {
xmlReader.setProperty("http://xml.org/sax/properties/lexical-handler", saxHandler.getLexicalHandler());
}
- catch (SAXNotRecognizedException e) {
+ catch (SAXNotRecognizedException ex) {
// ignore
}
- catch (SAXNotSupportedException e) {
+ catch (SAXNotSupportedException ex) {
// ignore
}
try {
@@ -236,20 +236,23 @@ protected final Object unmarshalSaxReader(XMLReader xmlReader, InputSource input
}
@Override
- protected final Object unmarshalXmlEventReader(XMLEventReader eventReader) throws XmlMappingException {
- XMLReader reader = StaxUtils.createXMLReader(eventReader);
+ protected Object unmarshalInputStream(InputStream inputStream) throws XmlMappingException, IOException {
try {
- return unmarshalSaxReader(reader, new InputSource());
+ InputStream nonClosingInputStream = new NonClosingInputStream(inputStream);
+ XmlObject object = XmlObject.Factory.parse(nonClosingInputStream, getXmlOptions());
+ validate(object);
+ return object;
}
- catch (IOException ex) {
+ catch (XmlException ex) {
throw convertXmlBeansException(ex, false);
}
}
@Override
- protected final Object unmarshalXmlStreamReader(XMLStreamReader streamReader) throws XmlMappingException {
+ protected Object unmarshalReader(Reader reader) throws XmlMappingException, IOException {
try {
- XmlObject object = XmlObject.Factory.parse(streamReader, getXmlOptions());
+ Reader nonClosingReader = new NonClosingReader(reader);
+ XmlObject object = XmlObject.Factory.parse(nonClosingReader, getXmlOptions());
validate(object);
return object;
}
@@ -312,6 +315,7 @@ else if (ex instanceof XmlException || ex instanceof SAXException) {
}
}
+
/**
* See SPR-7034
*/
@@ -388,6 +392,7 @@ public void close() throws IOException {
}
}
+
private static class NonClosingReader extends Reader {
private final WeakReference<Reader> reader;
|
4becfb06985f23704a42a9699963f5bbe0366e7a
|
intellij-community
|
Fixed ipnb editor layout.--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/src/org/jetbrains/plugins/ipnb/editor/IpnbEditorUtil.java b/src/org/jetbrains/plugins/ipnb/editor/IpnbEditorUtil.java
index 1ee82a6327b02..6ffcceffb3a8c 100644
--- a/src/org/jetbrains/plugins/ipnb/editor/IpnbEditorUtil.java
+++ b/src/org/jetbrains/plugins/ipnb/editor/IpnbEditorUtil.java
@@ -17,13 +17,21 @@
import com.google.common.collect.Lists;
import com.intellij.execution.impl.ConsoleViewUtil;
+import com.intellij.ide.ui.UISettings;
+import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
+import com.intellij.openapi.editor.EditorSettings;
+import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
+import com.intellij.openapi.editor.colors.EditorColorsScheme;
+import com.intellij.openapi.editor.colors.impl.DelegateColorScheme;
import com.intellij.openapi.editor.ex.EditorEx;
+import com.intellij.openapi.editor.impl.softwrap.SoftWrapAppliancePlaces;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiDocumentManager;
+import com.intellij.ui.JBColor;
import com.intellij.util.ui.UIUtil;
import com.jetbrains.python.PythonFileType;
import com.jetbrains.python.psi.impl.PyExpressionCodeFragmentImpl;
@@ -40,7 +48,8 @@
public class IpnbEditorUtil {
public static Editor createPythonCodeEditor(@NotNull Project project, @NotNull String text) {
- EditorEx editor = (EditorEx)EditorFactory.getInstance().createEditor(createPythonCodeDocument(project, text), project, PythonFileType.INSTANCE, false);
+ EditorEx editor =
+ (EditorEx)EditorFactory.getInstance().createEditor(createPythonCodeDocument(project, text), project, PythonFileType.INSTANCE, false);
noScrolling(editor);
ConsoleViewUtil.setupConsoleEditor(editor, false, false);
return editor;
@@ -50,7 +59,7 @@ private static void noScrolling(EditorEx editor) {
editor.getScrollPane().setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER);
editor.getScrollPane().setWheelScrollingEnabled(false);
List<MouseWheelListener> listeners = Lists.newArrayList(editor.getScrollPane().getMouseWheelListeners());
- for (MouseWheelListener l: listeners) {
+ for (MouseWheelListener l : listeners) {
editor.getScrollPane().removeMouseWheelListener(l);
}
}
@@ -64,17 +73,12 @@ public static Document createPythonCodeDocument(@NotNull final Project project,
return PsiDocumentManager.getInstance(project).getDocument(fragment);
}
- public static JPanel createPanelWithPrompt(@NotNull String promptText, @NotNull final JComponent component) {
- JPanel container = new JPanel(new BorderLayout());
- JPanel p = new JPanel(new BorderLayout());
- p.add(new JLabel(promptText), BorderLayout.WEST);
- container.add(p, BorderLayout.NORTH);
-
- container.add(component, BorderLayout.CENTER);
-
- p.setBackground(getBackground());
-
- return container;
+ public static JComponent createPromptPanel(@NotNull String promptText) {
+ JLabel promptLabel = new JLabel(promptText);
+ promptLabel.setFont(promptLabel.getFont().deriveFont(Font.BOLD));
+ promptLabel.setForeground(JBColor.BLUE);
+ promptLabel.setBackground(getBackground());
+ return promptLabel;
}
public static Color getBackground() {
diff --git a/src/org/jetbrains/plugins/ipnb/editor/panels/CodeOutputPanel.java b/src/org/jetbrains/plugins/ipnb/editor/panels/CodeOutputPanel.java
index ba75c4781d3e1..4588b7fcde317 100644
--- a/src/org/jetbrains/plugins/ipnb/editor/panels/CodeOutputPanel.java
+++ b/src/org/jetbrains/plugins/ipnb/editor/panels/CodeOutputPanel.java
@@ -2,26 +2,14 @@
import com.intellij.openapi.project.Project;
import org.jetbrains.annotations.NotNull;
-import org.jetbrains.plugins.ipnb.editor.IpnbEditorUtil;
-import org.jetbrains.plugins.ipnb.format.cells.output.CellOutput;
import javax.swing.*;
import java.awt.*;
public class CodeOutputPanel extends JPanel {
- private final Project myProject;
+ public CodeOutputPanel(@NotNull String outputText) {
+ super(new BorderLayout());
- public CodeOutputPanel(@NotNull final Project project, @NotNull final CellOutput cell, int promptNumber) {
- myProject = project;
- setLayout(new BoxLayout(this, BoxLayout.Y_AXIS));
-
- final String text = cell.getSourceAsString();
- if (text != null) {
- add(IpnbEditorUtil.createPanelWithPrompt(outputPrompt(promptNumber), new JTextArea(text)));
- }
- }
-
- private String outputPrompt(int promptNumber) {
- return String.format("Out[%d]:", promptNumber);
+ add(new JTextArea(outputText), BorderLayout.CENTER);
}
}
diff --git a/src/org/jetbrains/plugins/ipnb/editor/panels/CodeSourcePanel.java b/src/org/jetbrains/plugins/ipnb/editor/panels/CodeSourcePanel.java
index 15f6540f244ec..afdb85cd491a0 100644
--- a/src/org/jetbrains/plugins/ipnb/editor/panels/CodeSourcePanel.java
+++ b/src/org/jetbrains/plugins/ipnb/editor/panels/CodeSourcePanel.java
@@ -20,6 +20,7 @@
import org.jetbrains.plugins.ipnb.editor.IpnbEditorUtil;
import javax.swing.*;
+import java.awt.*;
/**
* @author traff
@@ -27,16 +28,10 @@
public class CodeSourcePanel extends JPanel implements EditorPanel {
private final Editor myEditor;
- public CodeSourcePanel(Project project, String source, int promptNumber) {
- setLayout(new BoxLayout(this, BoxLayout.Y_AXIS));
-
+ public CodeSourcePanel(Project project, String source) {
+ super(new BorderLayout());
myEditor = IpnbEditorUtil.createPythonCodeEditor(project, source);
-
- add(IpnbEditorUtil.createPanelWithPrompt(inputPrompt(promptNumber), myEditor.getComponent()));
- }
-
- private String inputPrompt(int promptNumber) {
- return String.format("In[%d]:", promptNumber);
+ add(myEditor.getComponent(), BorderLayout.CENTER);
}
@Override
diff --git a/src/org/jetbrains/plugins/ipnb/editor/panels/IpnbFilePanel.java b/src/org/jetbrains/plugins/ipnb/editor/panels/IpnbFilePanel.java
index 4286af11c80fc..f1ae99f7bb13d 100644
--- a/src/org/jetbrains/plugins/ipnb/editor/panels/IpnbFilePanel.java
+++ b/src/org/jetbrains/plugins/ipnb/editor/panels/IpnbFilePanel.java
@@ -16,7 +16,6 @@
package org.jetbrains.plugins.ipnb.editor.panels;
import com.intellij.openapi.Disposable;
-import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.project.Project;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@@ -38,54 +37,75 @@ public class IpnbFilePanel extends JPanel {
public IpnbFilePanel(@NotNull Project project, @Nullable Disposable parent, @NotNull IpnbFile file) {
super();
setLayout(new GridBagLayout());
+ setBackground(IpnbEditorUtil.getBackground());
- int row = 0;
GridBagConstraints c = new GridBagConstraints();
c.fill = GridBagConstraints.HORIZONTAL;
c.anchor = GridBagConstraints.PAGE_START;
- c.gridx = 0;
+ c.gridx = 1;
+ c.gridy = 0;
+ c.gridwidth = 1;
+
+ c.insets = new Insets(10, 0, 0, 0);
for (IpnbCell cell : file.getCells()) {
- JPanel panel = createPanelForCell(project, cell);
- c.gridy = row;
- row++;
- add(panel, c);
- if (cell instanceof CodeCell) {
- for (CellOutput cellOutput : ((CodeCell)cell).getCellOutputs()) {
- final JPanel outputPanel = createPanelForCellOutput(project, cellOutput, ((CodeCell)cell).getPromptNumber());
- c.gridy = row;
- row++;
- add(outputPanel, c);
- }
- }
+ c.gridy = addCellToPanel(project, cell, c);
}
c.weighty = 1;
add(createEmptyPanel(), c);
}
- private JPanel createEmptyPanel() {
- JPanel panel = new JPanel();
- panel.setBackground(IpnbEditorUtil.getBackground());
- return panel;
+ protected static String prompt(int promptNumber, String type) {
+ return String.format(type + "[%d]:", promptNumber);
}
- private JPanel createPanelForCellOutput(@NotNull final Project project, @NotNull final CellOutput cell, int number) {
- return new CodeOutputPanel(project, cell, number);
+ public static void addPromptPanel(JComponent container,
+ int promptNumber,
+ String promptType,
+ JComponent component,
+ GridBagConstraints c) {
+ c.gridx = 0;
+ container.add(IpnbEditorUtil.createPromptPanel(prompt(promptNumber, promptType)), c);
+ c.gridx = 1;
+ container.add(component, c);
}
- private JPanel createPanelForCell(@NotNull final Project project, @NotNull final IpnbCell cell) {
+ private int addCellToPanel(Project project, IpnbCell cell, GridBagConstraints c) {
if (cell instanceof CodeCell) {
- return new CodeSourcePanel(project, ((CodeCell)cell).getSourceAsString(), ((CodeCell)cell).getPromptNumber());
+ c.gridwidth = 2;
+ c.gridx = 0;
+
+ CodeCell codeCell = (CodeCell)cell;
+
+ addPromptPanel(this, codeCell.getPromptNumber(), "In", new CodeSourcePanel(project, codeCell.getSourceAsString()), c);
+
+ c.gridx = 1;
+ c.gridwidth = 1;
+
+ for (CellOutput cellOutput : codeCell.getCellOutputs()) {
+ c.gridy++;
+ if (cellOutput.getSourceAsString() != null) {
+ addPromptPanel(this, codeCell.getPromptNumber(), "Out",
+ new CodeOutputPanel(cellOutput.getSourceAsString()), c);
+ }
+ }
}
else if (cell instanceof MarkdownCell) {
- return new MarkdownPanel(project, (MarkdownCell)cell);
+ add(new MarkdownPanel(project, (MarkdownCell)cell), c);
}
else if (cell instanceof HeadingCell) {
- return new HeadingPanel(project, (HeadingCell)cell);
+ add(new HeadingPanel(project, (HeadingCell)cell), c);
}
else {
throw new UnsupportedOperationException(cell.getClass().toString());
}
+ return c.gridy + 1;
+ }
+
+ private JPanel createEmptyPanel() {
+ JPanel panel = new JPanel();
+ panel.setBackground(IpnbEditorUtil.getBackground());
+ return panel;
}
}
|
8e9ab4143d260585d73890c6d691b18f865bd2a8
|
intellij-community
|
disable pattern configuration for the same- named packages (IDEA-151250)--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/java/execution/impl/src/com/intellij/execution/testframework/AbstractPatternBasedConfigurationProducer.java b/java/execution/impl/src/com/intellij/execution/testframework/AbstractPatternBasedConfigurationProducer.java
index 96b38a73ed650..019e96608ce0d 100644
--- a/java/execution/impl/src/com/intellij/execution/testframework/AbstractPatternBasedConfigurationProducer.java
+++ b/java/execution/impl/src/com/intellij/execution/testframework/AbstractPatternBasedConfigurationProducer.java
@@ -218,7 +218,7 @@ private boolean collectTestMembers(PsiElement[] elements,
for (PsiElement psiClass : processor.getCollection()) {
classes.add(getQName(psiClass));
}
- return true;
+ return classes.size() > 1;
}
private static PsiElement[] collectLocationElements(LinkedHashSet<String> classes, DataContext dataContext) {
|
3fbdf05921125b2bff7e4b914e9e060814010a6f
|
kotlin
|
Added some new test for java8--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/java8-tests/tests/org/jetbrains/kotlin/codegen/generated/BlackBoxWithJava8CodegenTestGenerated.java b/compiler/java8-tests/tests/org/jetbrains/kotlin/codegen/generated/BlackBoxWithJava8CodegenTestGenerated.java
index 6bae70acab423..4e17e4b4d7a76 100644
--- a/compiler/java8-tests/tests/org/jetbrains/kotlin/codegen/generated/BlackBoxWithJava8CodegenTestGenerated.java
+++ b/compiler/java8-tests/tests/org/jetbrains/kotlin/codegen/generated/BlackBoxWithJava8CodegenTestGenerated.java
@@ -35,9 +35,15 @@ public void testAllFilesPresentInBoxWithJava() throws Exception {
JetTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/java8/boxWithJava"), Pattern.compile("^([^\\.]+)$"), true);
}
- @TestMetadata("defaultMethodCall")
- public void testDefaultMethodCall() throws Exception {
- String fileName = JetTestUtils.navigationMetadata("compiler/testData/codegen/java8/boxWithJava/defaultMethodCall/");
+ @TestMetadata("defaultMethodCallFromTrait")
+ public void testDefaultMethodCallFromTrait() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("compiler/testData/codegen/java8/boxWithJava/defaultMethodCallFromTrait/");
+ doTestWithJava(fileName);
+ }
+
+ @TestMetadata("defaultMethodCallViaClass")
+ public void testDefaultMethodCallViaClass() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("compiler/testData/codegen/java8/boxWithJava/defaultMethodCallViaClass/");
doTestWithJava(fileName);
}
@@ -47,12 +53,24 @@ public void testDefaultMethodCallViaTrait() throws Exception {
doTestWithJava(fileName);
}
+ @TestMetadata("defaultMethodOverride")
+ public void testDefaultMethodOverride() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("compiler/testData/codegen/java8/boxWithJava/defaultMethodOverride/");
+ doTestWithJava(fileName);
+ }
+
@TestMetadata("dontDelegateToDefaultMethods")
public void testDontDelegateToDefaultMethods() throws Exception {
String fileName = JetTestUtils.navigationMetadata("compiler/testData/codegen/java8/boxWithJava/dontDelegateToDefaultMethods/");
doTestWithJava(fileName);
}
+ @TestMetadata("inheritKotlin")
+ public void testInheritKotlin() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("compiler/testData/codegen/java8/boxWithJava/inheritKotlin/");
+ doTestWithJava(fileName);
+ }
+
@TestMetadata("samOnInterfaceWithDefaultMethod")
public void testSamOnInterfaceWithDefaultMethod() throws Exception {
String fileName = JetTestUtils.navigationMetadata("compiler/testData/codegen/java8/boxWithJava/samOnInterfaceWithDefaultMethod/");
diff --git a/compiler/testData/codegen/java8/boxWithJava/defaultMethodCall/Simple.java b/compiler/testData/codegen/java8/boxWithJava/defaultMethodCallFromTrait/Simple.java
similarity index 100%
rename from compiler/testData/codegen/java8/boxWithJava/defaultMethodCall/Simple.java
rename to compiler/testData/codegen/java8/boxWithJava/defaultMethodCallFromTrait/Simple.java
diff --git a/compiler/testData/codegen/java8/boxWithJava/defaultMethodCallFromTrait/defaultCall.kt b/compiler/testData/codegen/java8/boxWithJava/defaultMethodCallFromTrait/defaultCall.kt
new file mode 100644
index 0000000000000..55133e938783c
--- /dev/null
+++ b/compiler/testData/codegen/java8/boxWithJava/defaultMethodCallFromTrait/defaultCall.kt
@@ -0,0 +1,14 @@
+trait KTrait : Simple {
+ fun bar(): String {
+ return test("O") + Simple.testStatic("O")
+ }
+}
+
+class Test : KTrait {}
+
+fun box(): String {
+ val test = Test().bar()
+ if (test != "OKOK") return "fail $test"
+
+ return "OK"
+}
\ No newline at end of file
diff --git a/compiler/testData/codegen/java8/boxWithJava/defaultMethodCallViaClass/Simple.java b/compiler/testData/codegen/java8/boxWithJava/defaultMethodCallViaClass/Simple.java
new file mode 100644
index 0000000000000..9e37ab676594c
--- /dev/null
+++ b/compiler/testData/codegen/java8/boxWithJava/defaultMethodCallViaClass/Simple.java
@@ -0,0 +1,9 @@
+interface Simple {
+ default String test(String s) {
+ return s + "K";
+ }
+
+ static String testStatic(String s) {
+ return s + "K";
+ }
+}
\ No newline at end of file
diff --git a/compiler/testData/codegen/java8/boxWithJava/defaultMethodCall/Simple.kt b/compiler/testData/codegen/java8/boxWithJava/defaultMethodCallViaClass/defaultCall.kt
similarity index 100%
rename from compiler/testData/codegen/java8/boxWithJava/defaultMethodCall/Simple.kt
rename to compiler/testData/codegen/java8/boxWithJava/defaultMethodCallViaClass/defaultCall.kt
diff --git a/compiler/testData/codegen/java8/boxWithJava/defaultMethodOverride/Simple.java b/compiler/testData/codegen/java8/boxWithJava/defaultMethodOverride/Simple.java
new file mode 100644
index 0000000000000..552a157fcb70b
--- /dev/null
+++ b/compiler/testData/codegen/java8/boxWithJava/defaultMethodOverride/Simple.java
@@ -0,0 +1,5 @@
+interface Simple {
+ default String test(String s) {
+ return s + "Fail";
+ }
+}
\ No newline at end of file
diff --git a/compiler/testData/codegen/java8/boxWithJava/defaultMethodOverride/override.kt b/compiler/testData/codegen/java8/boxWithJava/defaultMethodOverride/override.kt
new file mode 100644
index 0000000000000..d834a1f60cb93
--- /dev/null
+++ b/compiler/testData/codegen/java8/boxWithJava/defaultMethodOverride/override.kt
@@ -0,0 +1,13 @@
+trait KTrait: Simple {
+ override fun test(s: String): String {
+ return s + "K"
+ }
+}
+
+class Test : KTrait {
+
+}
+
+fun box(): String {
+ return Test().test("O")
+}
\ No newline at end of file
diff --git a/compiler/testData/codegen/java8/boxWithJava/dontDelegateToDefaultMethods/delegation.kt b/compiler/testData/codegen/java8/boxWithJava/dontDelegateToDefaultMethods/dontDelegate.kt
similarity index 100%
rename from compiler/testData/codegen/java8/boxWithJava/dontDelegateToDefaultMethods/delegation.kt
rename to compiler/testData/codegen/java8/boxWithJava/dontDelegateToDefaultMethods/dontDelegate.kt
diff --git a/compiler/testData/codegen/java8/boxWithJava/inheritKotlin/Simple.java b/compiler/testData/codegen/java8/boxWithJava/inheritKotlin/Simple.java
new file mode 100644
index 0000000000000..02c979c1af19a
--- /dev/null
+++ b/compiler/testData/codegen/java8/boxWithJava/inheritKotlin/Simple.java
@@ -0,0 +1,5 @@
+interface Simple extends KTrait {
+ default String test() {
+ return "simple";
+ }
+}
\ No newline at end of file
diff --git a/compiler/testData/codegen/java8/boxWithJava/inheritKotlin/defaultCall.kt b/compiler/testData/codegen/java8/boxWithJava/inheritKotlin/defaultCall.kt
new file mode 100644
index 0000000000000..c9572f45f3731
--- /dev/null
+++ b/compiler/testData/codegen/java8/boxWithJava/inheritKotlin/defaultCall.kt
@@ -0,0 +1,23 @@
+trait KTrait {
+ fun test(): String {
+ return "base";
+ }
+}
+
+class Test : Simple {
+
+ fun bar(): String {
+ return super.test()
+ }
+
+}
+
+fun box(): String {
+ val test = Test().test()
+ if (test != "simple") return "fail $test"
+
+ val bar = Test().bar()
+ if (bar != "simple") return "fail 2 $bar"
+
+ return "OK"
+}
\ No newline at end of file
|
3d6253c27fbba7cb88626eead38ec18449ffd7bf
|
kotlin
|
abstract + data and sealed + data are also- deprecated--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/ModifiersChecker.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/ModifiersChecker.kt
index 9d2f63b05319d..cb2181c4eb52c 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/ModifiersChecker.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/ModifiersChecker.kt
@@ -100,9 +100,11 @@ public object ModifierCheckerCore {
result += incompatibilityRegister(PRIVATE_KEYWORD, PROTECTED_KEYWORD, PUBLIC_KEYWORD, INTERNAL_KEYWORD)
// Abstract + open + final + sealed: incompatible
result += incompatibilityRegister(ABSTRACT_KEYWORD, OPEN_KEYWORD, FINAL_KEYWORD, SEALED_KEYWORD)
- // data + open, data + inner
+ // data + open, data + inner, data + abstract, data + sealed
result += deprecationRegister(DATA_KEYWORD, OPEN_KEYWORD)
result += deprecationRegister(DATA_KEYWORD, INNER_KEYWORD)
+ result += deprecationRegister(DATA_KEYWORD, ABSTRACT_KEYWORD)
+ result += deprecationRegister(DATA_KEYWORD, SEALED_KEYWORD)
// open is redundant to abstract & override
result += redundantRegister(ABSTRACT_KEYWORD, OPEN_KEYWORD)
result += redundantRegister(OVERRIDE_KEYWORD, OPEN_KEYWORD)
diff --git a/compiler/testData/diagnostics/tests/dataClasses/dataInheritance.kt b/compiler/testData/diagnostics/tests/dataClasses/dataInheritance.kt
index 86661a48de69b..febb864722619 100644
--- a/compiler/testData/diagnostics/tests/dataClasses/dataInheritance.kt
+++ b/compiler/testData/diagnostics/tests/dataClasses/dataInheritance.kt
@@ -2,7 +2,7 @@ interface Allowed
open class NotAllowed
-abstract data class Base(val x: Int)
+<!DEPRECATED_MODIFIER_PAIR!>abstract<!> <!DEPRECATED_MODIFIER_PAIR!>data<!> class Base(val x: Int)
class Derived: Base(42)
diff --git a/compiler/testData/diagnostics/tests/dataClasses/sealedDataClass.kt b/compiler/testData/diagnostics/tests/dataClasses/sealedDataClass.kt
new file mode 100644
index 0000000000000..8aabf0580394f
--- /dev/null
+++ b/compiler/testData/diagnostics/tests/dataClasses/sealedDataClass.kt
@@ -0,0 +1,4 @@
+<!DEPRECATED_MODIFIER_PAIR!>sealed<!> <!DEPRECATED_MODIFIER_PAIR!>data<!> class My(val x: Int) {
+ object Your: My(1)
+ class His(y: Int): My(y)
+}
diff --git a/compiler/testData/diagnostics/tests/dataClasses/sealedDataClass.txt b/compiler/testData/diagnostics/tests/dataClasses/sealedDataClass.txt
new file mode 100644
index 0000000000000..5a3e8f487091a
--- /dev/null
+++ b/compiler/testData/diagnostics/tests/dataClasses/sealedDataClass.txt
@@ -0,0 +1,31 @@
+package
+
[email protected]() public sealed class My {
+ private constructor My(/*0*/ x: kotlin.Int)
+ public final val x: kotlin.Int
+ public final /*synthesized*/ fun component1(): kotlin.Int
+ public final /*synthesized*/ fun copy(/*0*/ x: kotlin.Int = ...): My
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+
+ public final class His : My {
+ public constructor His(/*0*/ y: kotlin.Int)
+ public final override /*1*/ /*fake_override*/ val x: kotlin.Int
+ public final override /*1*/ /*fake_override*/ fun component1(): kotlin.Int
+ public final override /*1*/ /*fake_override*/ fun copy(/*0*/ x: kotlin.Int = ...): My
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+ }
+
+ public object Your : My {
+ private constructor Your()
+ public final override /*1*/ /*fake_override*/ val x: kotlin.Int
+ public final override /*1*/ /*fake_override*/ fun component1(): kotlin.Int
+ public final override /*1*/ /*fake_override*/ fun copy(/*0*/ x: kotlin.Int = ...): My
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+ }
+}
diff --git a/compiler/tests/org/jetbrains/kotlin/checkers/JetDiagnosticsTestGenerated.java b/compiler/tests/org/jetbrains/kotlin/checkers/JetDiagnosticsTestGenerated.java
index c7ad9833c5051..44ddcffb7451c 100644
--- a/compiler/tests/org/jetbrains/kotlin/checkers/JetDiagnosticsTestGenerated.java
+++ b/compiler/tests/org/jetbrains/kotlin/checkers/JetDiagnosticsTestGenerated.java
@@ -3414,6 +3414,12 @@ public void testRepeatedProperties() throws Exception {
doTest(fileName);
}
+ @TestMetadata("sealedDataClass.kt")
+ public void testSealedDataClass() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("compiler/testData/diagnostics/tests/dataClasses/sealedDataClass.kt");
+ doTest(fileName);
+ }
+
@TestMetadata("secondParamIsVal.kt")
public void testSecondParamIsVal() throws Exception {
String fileName = JetTestUtils.navigationMetadata("compiler/testData/diagnostics/tests/dataClasses/secondParamIsVal.kt");
|
b079f9911a118b01bbef4515b2cea7fa09fdd5de
|
drools
|
[DROOLS-128] fix retriving of global entry set from- a Stateless session--
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/GlobalsTest.java b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/GlobalsTest.java
new file mode 100644
index 00000000000..3866be067be
--- /dev/null
+++ b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/GlobalsTest.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2013 JBoss Inc
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drools.compiler.integrationtests;
+
+import org.drools.core.impl.StatelessKnowledgeSessionImpl;
+import org.kie.api.io.ResourceType;
+import org.drools.core.base.MapGlobalResolver;
+import org.junit.Test;
+import org.kie.internal.KnowledgeBase;
+import org.kie.internal.KnowledgeBaseFactory;
+import org.kie.internal.builder.KnowledgeBuilder;
+import org.kie.internal.builder.KnowledgeBuilderFactory;
+import org.kie.internal.io.ResourceFactory;
+import org.kie.internal.runtime.StatefulKnowledgeSession;
+import org.kie.internal.runtime.StatelessKnowledgeSession;
+
+import java.util.Map;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+
+
+public class GlobalsTest {
+
+ @Test
+ public void testGlobalAccess() {
+
+ String drl = "import org.drools.core.base.MapGlobalResolver;\n" +
+ "global java.lang.String myGlobal;\n" +
+ "global String unused; \n" ;
+
+ KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
+ kbuilder.add(ResourceFactory.newByteArrayResource(drl.getBytes()), ResourceType.DRL);
+ KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
+ kbase.addKnowledgePackages(kbuilder.getKnowledgePackages());
+ StatefulKnowledgeSession session1 = kbase.newStatefulKnowledgeSession();
+
+ String sample = "default string";
+
+ // Testing 1.
+ System.out.println("Start testing 1.");
+ session1.setGlobal("myGlobal", "Testing 1");
+ session1.insert(sample);
+ session1.fireAllRules();
+ Map.Entry[] entries1 = ((MapGlobalResolver) session1.getGlobals()).getGlobals();
+ assertEquals( 1, entries1.length );
+ assertEquals( entries1[0].getValue(), "Testing 1" );
+ assertEquals( 1, session1.getGlobals().getGlobalKeys().size() );
+ assertTrue( session1.getGlobals().getGlobalKeys().contains("myGlobal") );
+ session1.dispose();
+
+ // Testing 2.
+ System.out.println("Start testing 2.");
+ StatelessKnowledgeSession session2 = session1.getKieBase().newStatelessKnowledgeSession();
+ session2.setGlobal("myGlobal", "Testing 2");
+ session2.execute(sample);
+ Map.Entry[] entries2 = ((MapGlobalResolver) session2.getGlobals()).getGlobals();
+ assertEquals(1, entries2.length);
+ assertEquals( entries2[0].getValue(), "Testing 2" );
+ assertEquals( 1, session2.getGlobals().getGlobalKeys().size() );
+ assertTrue( session2.getGlobals().getGlobalKeys().contains("myGlobal") );
+
+ // Testing 3.
+ System.out.println("Start testing 3.");
+ StatefulKnowledgeSession session3 = ((StatelessKnowledgeSessionImpl) session2).newWorkingMemory();
+ session3.insert(sample);
+ session3.fireAllRules();
+ Map.Entry[] entries3 = ((MapGlobalResolver) session3.getGlobals()).getGlobals();
+ assertEquals( 1, entries3.length );
+ assertEquals( entries3[0].getValue(), "Testing 2" );
+ assertEquals( 1, session3.getGlobals().getGlobalKeys().size() );
+ assertTrue( session3.getGlobals().getGlobalKeys().contains("myGlobal") );
+
+
+ session3.setGlobal("myGlobal", "Testing 3 Over");
+ entries3 = ((MapGlobalResolver) session3.getGlobals()).getGlobals();
+ assertEquals(1, entries3.length);
+ assertEquals( entries3[0].getValue(), "Testing 3 Over" );
+ assertEquals( 1, session3.getGlobals().getGlobalKeys().size() );
+ assertTrue( session3.getGlobals().getGlobalKeys().contains("myGlobal") );
+
+ session3.dispose();
+
+ // Testing 4.
+ System.out.println("Start testing 4.");
+ StatefulKnowledgeSession session4 = ((StatelessKnowledgeSessionImpl) session2).newWorkingMemory();
+ session4.setGlobal("myGlobal", "Testing 4");
+ session4.insert(sample);
+ session4.fireAllRules();
+ Map.Entry[] entries4 = ((MapGlobalResolver) session4.getGlobals()).getGlobals();
+ assertEquals(1, entries4.length);
+ assertEquals( entries4[0].getValue(), "Testing 4" );
+ assertEquals( 1, session4.getGlobals().getGlobalKeys().size() );
+ assertTrue( session4.getGlobals().getGlobalKeys().contains("myGlobal") );
+
+ session4.dispose();
+ }
+}
diff --git a/drools-core/src/main/java/org/drools/core/base/MapGlobalResolver.java b/drools-core/src/main/java/org/drools/core/base/MapGlobalResolver.java
index 505bb1fede8..a0cf91e9034 100644
--- a/drools-core/src/main/java/org/drools/core/base/MapGlobalResolver.java
+++ b/drools-core/src/main/java/org/drools/core/base/MapGlobalResolver.java
@@ -20,7 +20,10 @@
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
+import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
@@ -34,7 +37,7 @@ public class MapGlobalResolver
private static final long serialVersionUID = 510l;
- private Map map;
+ private Map<String,Object> map;
private Globals delegate;
@@ -59,7 +62,19 @@ public void writeExternal(ObjectOutput out) throws IOException {
public void setDelegate(Globals delegate) {
this.delegate = delegate;
}
-
+
+ public Collection<String> getGlobalKeys() {
+ if ( delegate == null ) {
+ return Collections.unmodifiableCollection(map.keySet());
+ } else if ( delegate != null && map.size() == 0 ) {
+ return Collections.unmodifiableCollection( ((MapGlobalResolver) delegate).map.keySet() );
+ } else {
+ Collection<String> combined = new HashSet<String>( map.keySet() );
+ combined.addAll( ((MapGlobalResolver) delegate).map.keySet() );
+ return Collections.unmodifiableCollection( combined );
+ }
+ }
+
public Object get(String identifier) {
return resolveGlobal( identifier );
}
@@ -81,14 +96,23 @@ public void setGlobal(String identifier, Object value) {
value );
}
- public Entry[] getGlobals() {
- return (Entry[]) this.map.entrySet().toArray(new Entry[this.map.size()]);
+ public Entry<String,Object>[] getGlobals() {
+ if ( delegate == null ) {
+ return (Entry[]) this.map.entrySet().toArray(new Entry[this.map.size()]);
+ } else if ( delegate != null && map.size() == 0 ) {
+ Map<String,Object> delegateMap = ((MapGlobalResolver) delegate).map;
+ return (Entry[]) delegateMap.entrySet().toArray(new Entry[delegateMap.size()]);
+ } else {
+ Map<String,Object> combined = new HashMap<String,Object>( ((MapGlobalResolver) delegate).map );
+ combined.putAll( map );
+ return (Entry[]) combined.entrySet().toArray(new Entry[combined.size()]);
+ }
}
public GlobalResolver clone() {
- Map clone = new HashMap();
+ Map<String,Object> clone = new HashMap<String,Object>();
- for ( Entry entry : getGlobals() ) {
+ for ( Entry<String,Object> entry : getGlobals() ) {
clone.put( entry.getKey(), entry.getValue() );
}
return new MapGlobalResolver( clone );
|
165c8954c94ca67d75ecd7fcbc7f1d3da41f5473
|
orientdb
|
removed not useful system out from test cases.--
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/test/java/com/orientechnologies/orient/core/sql/lock/TestQueryRecordLockUnlock.java b/core/src/test/java/com/orientechnologies/orient/core/sql/lock/TestQueryRecordLockUnlock.java
index 81080a02da4..41a447bf417 100644
--- a/core/src/test/java/com/orientechnologies/orient/core/sql/lock/TestQueryRecordLockUnlock.java
+++ b/core/src/test/java/com/orientechnologies/orient/core/sql/lock/TestQueryRecordLockUnlock.java
@@ -54,7 +54,6 @@ public void run() {
db.close();
}
}
- System.out.println(Thread.currentThread().getName() + " done.");
}
});
}
@@ -112,7 +111,6 @@ public void run() {
db.close();
}
}
- System.out.println(Thread.currentThread().getName() + " done.");
}
});
}
@@ -171,7 +169,6 @@ public void run() {
}
}
}
- System.out.println(Thread.currentThread().getName() + " done.");
}
});
}
|
fa63d0a22133e1b785f500a1268edc66a2c3f14a
|
drools
|
JBRULES-2849 workaround bug in JDK 5 concerning- alternatives in lookbehind assertions, using alternatives of lookbehind- assertions rather than a lookbehind assertion with alternatives.--
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/drools/lang/dsl/AntlrDSLMappingEntry.java b/drools-compiler/src/main/java/org/drools/lang/dsl/AntlrDSLMappingEntry.java
index e8575ecf7c4..1678803a4e0 100644
--- a/drools-compiler/src/main/java/org/drools/lang/dsl/AntlrDSLMappingEntry.java
+++ b/drools-compiler/src/main/java/org/drools/lang/dsl/AntlrDSLMappingEntry.java
@@ -71,7 +71,11 @@ public void setKeyPattern(final String keyPat) {
if ( !keyPattern.startsWith( "^" ) ) {
// making it start after a non word char or at line start
- keyPattern = "(?<=\\W|^)" + keyPattern;
+ // JDK 5 (java version 1.5.0_22) is buggy: doesn't handle alternatives within
+ // zero-width lookbehind assertions. As a workaround, we use an alternative of
+ // zero-width lookbehind assertions, slightly longer, but simpler than anything else.
+ // keyPattern = "(?<=\\W|^)" + keyPattern; // works in JDK >=6
+ keyPattern = "(?:(?<=^)|(?<=\\W))" + keyPattern;
}
// If the pattern ends with a pure variable whose pattern could create
diff --git a/drools-compiler/src/test/java/org/drools/lang/dsl/DSLMappingEntryTest.java b/drools-compiler/src/test/java/org/drools/lang/dsl/DSLMappingEntryTest.java
index 5d9c07fdc06..1329cf4a4e0 100755
--- a/drools-compiler/src/test/java/org/drools/lang/dsl/DSLMappingEntryTest.java
+++ b/drools-compiler/src/test/java/org/drools/lang/dsl/DSLMappingEntryTest.java
@@ -9,6 +9,10 @@
public class DSLMappingEntryTest extends TestCase {
+ // Due to a bug in JDK 5, a workaround for zero-widht lookbehind has to be used.
+ // JDK works correctly with "(?<=^|\\W)"
+ private static final String lookbehind = "(?:(?<=^)|(?<=\\W))";
+
protected void setUp() throws Exception {
super.setUp();
}
@@ -39,7 +43,7 @@ public void testPatternCalculation() throws IOException {
final String inputKey = "The Customer name is {name} and surname is {surname} and it has US$ 50,00 on his {pocket}";
final String inputValue = "Customer( name == \"{name}\", surname == \"{surname}\", money > $money )";
- final String expectedKeyP = "(?<=\\W|^)The\\s+Customer\\s+name\\s+is\\s+(.*?)\\s+and\\s+surname\\s+is\\s+(.*?)\\s+and\\s+it\\s+has\\s+US\\$\\s+50,00\\s+on\\s+his\\s+(.*?)$";
+ final String expectedKeyP = lookbehind + "The\\s+Customer\\s+name\\s+is\\s+(.*?)\\s+and\\s+surname\\s+is\\s+(.*?)\\s+and\\s+it\\s+has\\s+US\\$\\s+50,00\\s+on\\s+his\\s+(.*?)$";
final String expectedValP = "Customer( name == \"{name}\", surname == \"{surname}\", money > $money )";
final DSLMappingEntry entry = createEntry( inputKey,
@@ -59,7 +63,7 @@ public void testPatternCalculation2() throws IOException {
final String inputKey = "-name is {name}";
final String inputValue = "name == \"{name}\"";
- final String expectedKeyP = "(?<=\\W|^)-\\s*name\\s+is\\s+(.*?)$";
+ final String expectedKeyP = lookbehind + "-\\s*name\\s+is\\s+(.*?)$";
final String expectedValP = "name == \"{name}\"";
final DSLMappingEntry entry = createEntry( inputKey,
@@ -80,7 +84,7 @@ public void testPatternCalculation3() throws IOException {
final String inputKey = "- name is {name}";
final String inputValue = "name == \"{name}\"";
- final String expectedKeyP = "(?<=\\W|^)-\\s*name\\s+is\\s+(.*?)$";
+ final String expectedKeyP = lookbehind + "-\\s*name\\s+is\\s+(.*?)$";
final String expectedValP = "name == \"{name}\"";
final DSLMappingEntry entry = createEntry( inputKey,
@@ -173,7 +177,7 @@ public void testExpandWithBrackets() throws IOException {
DSLMappingEntry entry5 = this.createEntry( "When the credit rating is {rating:regex:\\d{3}}",
"applicant:Applicant(credit=={rating})" );
- assertEquals( "(?<=\\W|^)When\\s+the\\s+credit\\s+rating\\s+is\\s+(\\d{3})(?=\\W|$)",
+ assertEquals( lookbehind + "When\\s+the\\s+credit\\s+rating\\s+is\\s+(\\d{3})(?=\\W|$)",
entry5.getKeyPattern().toString() );
assertEquals( "applicant:Applicant(credit=={rating})",
entry5.getValuePattern() );
@@ -181,7 +185,7 @@ public void testExpandWithBrackets() throws IOException {
DSLMappingEntry entry6 = this.createEntry( "This is a sentence with line breaks",
"Cheese\\n(price == 10)" );
- assertEquals( "(?<=\\W|^)This\\s+is\\s+a\\s+sentence\\s+with\\s+line\\s+breaks(?=\\W|$)",
+ assertEquals( lookbehind + "This\\s+is\\s+a\\s+sentence\\s+with\\s+line\\s+breaks(?=\\W|$)",
entry6.getKeyPattern().toString() );
assertEquals( "Cheese\n(price == 10)",
entry6.getValuePattern());
@@ -189,14 +193,14 @@ public void testExpandWithBrackets() throws IOException {
DSLMappingEntry entry7 = this.createEntry( "Bedingung-\\#19-MKM4",
"eval ( $p.getTempVal(\"\\#UML-ATZ-1\") < $p.getZvUmlStfr() )" );
- assertEquals( "(?<=\\W|^)Bedingung-#19-MKM4(?=\\W|$)",
+ assertEquals( lookbehind + "Bedingung-#19-MKM4(?=\\W|$)",
entry7.getKeyPattern().toString() );
assertEquals( "eval ( $p.getTempVal(\"#UML-ATZ-1\") < $p.getZvUmlStfr() )",
entry7.getValuePattern());
DefaultExpander ex = makeExpander( entry1, entry2, entry3, entry4,
entry5, entry6, entry7 );
- StringBuilder sb = new StringBuilder( "rule x\n" + "when\n" );
+ StringBuilder sb = new StringBuilder( "rule x\n" ).append( "when\n" );
sb.append( "attr name is in [ 'Edson', 'Bob' ]" ).append( "\n" );
sb.append( "he (is) a $xx handsome man" ).append( "\n" );
diff --git a/drools-compiler/src/test/java/org/drools/lang/dsl/DSLTokenizedMappingFileTest.java b/drools-compiler/src/test/java/org/drools/lang/dsl/DSLTokenizedMappingFileTest.java
index 681dc64c446..47b02fbfc60 100644
--- a/drools-compiler/src/test/java/org/drools/lang/dsl/DSLTokenizedMappingFileTest.java
+++ b/drools-compiler/src/test/java/org/drools/lang/dsl/DSLTokenizedMappingFileTest.java
@@ -8,6 +8,11 @@
import junit.framework.TestCase;
public class DSLTokenizedMappingFileTest extends TestCase {
+
+ // Due to a bug in JDK 5, a workaround for zero-widht lookbehind has to be used.
+ // JDK works correctly with "(?<=^|\\W)"
+ private static final String lookbehind = "(?:(?<=^)|(?<=\\W))";
+
private DSLMappingFile file = null;
private final String filename = "test_metainfo.dsl";
@@ -62,7 +67,7 @@ public void testParseFileWithBrackets() {
entry.getSection() );
assertEquals( DSLMappingEntry.EMPTY_METADATA,
entry.getMetaData() );
- assertEquals( "(?<=\\W|^)ATTRIBUTE\\s+\"(.*?)\"\\s+IS\\s+IN\\s+[(.*?)](?=\\W|$)",
+ assertEquals( lookbehind + "ATTRIBUTE\\s+\"(.*?)\"\\s+IS\\s+IN\\s+[(.*?)](?=\\W|$)",
entry.getKeyPattern().toString() );
//Attribute( {attr} in ({list}) )
assertEquals( "Attribute( {attr} in ({list}) )",
@@ -97,7 +102,7 @@ public void testParseFileWithEscaptedBrackets() {
assertEquals( DSLMappingEntry.EMPTY_METADATA,
entry.getMetaData() );
- assertEquals( "(?<=\\W|^)ATTRIBUTE\\s+\"(.*?)\"\\s+IS\\s+IN\\s+\\[(.*?)\\](?=\\W|$)",
+ assertEquals( lookbehind + "ATTRIBUTE\\s+\"(.*?)\"\\s+IS\\s+IN\\s+\\[(.*?)\\](?=\\W|$)",
entry.getKeyPattern().toString() );
//Attribute( {attr} in ({list}) )
assertEquals( "Attribute( {attr} in ({list}) )",
@@ -166,7 +171,7 @@ public void testParseFileWithEscaptedEquals() {
entry.getSection() );
assertEquals( DSLMappingEntry.EMPTY_METADATA,
entry.getMetaData() );
- assertEquals( "(?<=\\W|^)something:\\=(.*?)$",
+ assertEquals( lookbehind + "something:\\=(.*?)$",
entry.getKeyPattern().toString() );
assertEquals( "Attribute( something == \"{value}\" )",
entry.getValuePattern() );
|
fc8ae4d30dee9b255f7caa7da76a9793b1cc6c25
|
elasticsearch
|
[TEST] Added test that verifies data integrity- during and after a simulated network split.--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/test/java/org/elasticsearch/discovery/DiscoveryWithNetworkFailuresTests.java b/src/test/java/org/elasticsearch/discovery/DiscoveryWithNetworkFailuresTests.java
index 1a2c01ccbbbc8..7c0824cc605ba 100644
--- a/src/test/java/org/elasticsearch/discovery/DiscoveryWithNetworkFailuresTests.java
+++ b/src/test/java/org/elasticsearch/discovery/DiscoveryWithNetworkFailuresTests.java
@@ -21,13 +21,25 @@
import com.google.common.base.Predicate;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
+import org.elasticsearch.action.get.GetResponse;
+import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.block.ClusterBlock;
+import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.transport.MockTransportService;
@@ -40,26 +52,46 @@
import static org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
+import static org.hamcrest.Matchers.*;
/**
*/
@ClusterScope(scope= Scope.TEST, numDataNodes =0)
public class DiscoveryWithNetworkFailuresTests extends ElasticsearchIntegrationTest {
+ private static final Settings nodeSettings = ImmutableSettings.settingsBuilder()
+ .put("discovery.type", "zen") // <-- To override the local setting if set externally
+ .put("discovery.zen.fd.ping_timeout", "1s") // <-- for hitting simulated network failures quickly
+ .put("discovery.zen.fd.ping_retries", "1") // <-- for hitting simulated network failures quickly
+ .put("discovery.zen.minimum_master_nodes", 2)
+ .put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, MockTransportService.class.getName())
+ .build();
+
+ @Override
+ protected int numberOfShards() {
+ return 3;
+ }
+
+ @Override
+ protected int numberOfReplicas() {
+ return 1;
+ }
+
+ @Override
+ public Settings indexSettings() {
+ Settings settings = super.indexSettings();
+ return ImmutableSettings.builder()
+ .put(settings)
+ .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE, 2)
+ .build();
+ }
+
@Test
@TestLogging("discovery.zen:TRACE")
public void failWithMinimumMasterNodesConfigured() throws Exception {
- final Settings settings = ImmutableSettings.settingsBuilder()
- .put("discovery.type", "zen") // <-- To override the local setting if set externally
- .put("discovery.zen.fd.ping_timeout", "1s") // <-- for hitting simulated network failures quickly
- .put("discovery.zen.fd.ping_retries", "1") // <-- for hitting simulated network failures quickly
- .put("discovery.zen.minimum_master_nodes", 2)
- .put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, MockTransportService.class.getName())
- .build();
- List<String> nodes = internalCluster().startNodesAsync(3, settings).get();
+ List<String> nodes = internalCluster().startNodesAsync(3, nodeSettings).get();
// Wait until a green status has been reaches and 3 nodes are part of the cluster
ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth()
@@ -145,6 +177,185 @@ public boolean apply(Object input) {
}
}
+ @Test
+ @TestLogging("discovery.zen:TRACE,action:TRACE,cluster.service:TRACE")
+ public void testDataConsistency() throws Exception {
+ List<String> nodes = internalCluster().startNodesAsync(3, nodeSettings).get();
+
+ // Wait until a green status has been reaches and 3 nodes are part of the cluster
+ ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth()
+ .setWaitForEvents(Priority.LANGUID)
+ .setWaitForNodes("3")
+ .get();
+ assertThat(clusterHealthResponse.isTimedOut(), is(false));
+
+ assertAcked(prepareCreate("test")
+ .addMapping("type", "field", "type=long")
+ .get());
+
+ IndexRequestBuilder[] indexRequests = new IndexRequestBuilder[1 + randomInt(1000)];
+ for (int i = 0; i < indexRequests.length; i++) {
+ indexRequests[i] = client().prepareIndex("test", "type", String.valueOf(i)).setSource("field", i);
+ }
+ indexRandom(true, indexRequests);
+
+
+ for (int i = 0; i < indexRequests.length; i++) {
+ GetResponse getResponse = client().prepareGet("test", "type", String.valueOf(i)).get();
+ assertThat(getResponse.isExists(), is(true));
+ assertThat(getResponse.getVersion(), equalTo(1l));
+ assertThat(getResponse.getId(), equalTo(String.valueOf(i)));
+ }
+ SearchResponse searchResponse = client().prepareSearch("test").setTypes("type")
+ .addSort("field", SortOrder.ASC)
+ .get();
+ assertHitCount(searchResponse, indexRequests.length);
+ for (int i = 0; i < searchResponse.getHits().getHits().length; i++) {
+ SearchHit searchHit = searchResponse.getHits().getAt(i);
+ assertThat(searchHit.id(), equalTo(String.valueOf(i)));
+ assertThat((long) searchHit.sortValues()[0], equalTo((long) i));
+ }
+
+ // Figure out what is the elected master node
+ DiscoveryNode masterDiscoNode = null;
+ for (String node : nodes) {
+ ClusterState state = internalCluster().client(node).admin().cluster().prepareState().setLocal(true).execute().actionGet().getState();
+ assertThat(state.nodes().size(), equalTo(3));
+ if (masterDiscoNode == null) {
+ masterDiscoNode = state.nodes().masterNode();
+ } else {
+ assertThat(state.nodes().masterNode(), equalTo(masterDiscoNode));
+ }
+ }
+ assert masterDiscoNode != null;
+ logger.info("---> legit elected master node=" + masterDiscoNode);
+ final Client masterClient = internalCluster().masterClient();
+
+ // Everything is stable now, it is now time to simulate evil...
+
+ // Pick a node that isn't the elected master.
+ String unluckyNode = null;
+ for (String node : nodes) {
+ if (!node.equals(masterDiscoNode.getName())) {
+ unluckyNode = node;
+ }
+ }
+ assert unluckyNode != null;
+
+ // Simulate a network issue between the unlucky node and the rest of the cluster.
+ for (String nodeId : nodes) {
+ if (!nodeId.equals(unluckyNode)) {
+ addFailToSendNoConnectRule(nodeId, unluckyNode);
+ addFailToSendNoConnectRule(unluckyNode, nodeId);
+ }
+ }
+ try {
+ // Wait until elected master has removed that the unlucky node...
+ boolean applied = awaitBusy(new Predicate<Object>() {
+ @Override
+ public boolean apply(Object input) {
+ return masterClient.admin().cluster().prepareState().setLocal(true).get().getState().nodes().size() == 2;
+ }
+ }, 1, TimeUnit.MINUTES);
+ assertThat(applied, is(true));
+
+ // The unlucky node must report *no* master node, since it can't connect to master and in fact it should
+ // continuously ping until network failures have been resolved. However
+ final Client isolatedNodeClient = internalCluster().client(unluckyNode);
+ // It may a take a bit before the node detects it has been cut off from the elected master
+ applied = awaitBusy(new Predicate<Object>() {
+ @Override
+ public boolean apply(Object input) {
+ ClusterState localClusterState = isolatedNodeClient.admin().cluster().prepareState().setLocal(true).get().getState();
+ DiscoveryNodes localDiscoveryNodes = localClusterState.nodes();
+ logger.info("localDiscoveryNodes=" + localDiscoveryNodes.prettyPrint());
+ return localDiscoveryNodes.masterNode() == null;
+ }
+ }, 10, TimeUnit.SECONDS);
+ assertThat(applied, is(true));
+
+ ClusterHealthResponse healthResponse = masterClient.admin().cluster().prepareHealth("test")
+ .setWaitForYellowStatus().get();
+ assertThat(healthResponse.isTimedOut(), is(false));
+ assertThat(healthResponse.getStatus(), equalTo(ClusterHealthStatus.YELLOW));
+
+ // Reads on the right side of the split must work
+ searchResponse = masterClient.prepareSearch("test").setTypes("type")
+ .addSort("field", SortOrder.ASC)
+ .get();
+ assertHitCount(searchResponse, indexRequests.length);
+ for (int i = 0; i < searchResponse.getHits().getHits().length; i++) {
+ SearchHit searchHit = searchResponse.getHits().getAt(i);
+ assertThat(searchHit.id(), equalTo(String.valueOf(i)));
+ assertThat((long) searchHit.sortValues()[0], equalTo((long) i));
+ }
+
+ // Reads on the wrong side of the split are partial
+ searchResponse = isolatedNodeClient.prepareSearch("test").setTypes("type")
+ .addSort("field", SortOrder.ASC)
+ .get();
+ assertThat(searchResponse.getSuccessfulShards(), lessThan(searchResponse.getTotalShards()));
+ assertThat(searchResponse.getHits().totalHits(), lessThan((long) indexRequests.length));
+
+ // Writes on the right side of the split must work
+ UpdateResponse updateResponse = masterClient.prepareUpdate("test", "type", "0").setDoc("field2", 2).get();
+ assertThat(updateResponse.getVersion(), equalTo(2l));
+
+ // Writes on the wrong side of the split fail
+ try {
+ isolatedNodeClient.prepareUpdate("test", "type", "0").setDoc("field2", 2)
+ .setTimeout(TimeValue.timeValueSeconds(5)) // Fail quick, otherwise we wait 60 seconds.
+ .get();
+ } catch (ClusterBlockException exception) {
+ assertThat(exception.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
+ assertThat(exception.blocks().size(), equalTo(1));
+ ClusterBlock clusterBlock = exception.blocks().iterator().next();
+ assertThat(clusterBlock.id(), equalTo(DiscoverySettings.NO_MASTER_BLOCK_ID));
+ }
+ } finally {
+ // stop simulating network failures, from this point on the unlucky node is able to rejoin
+ // We also need to do this even if assertions fail, since otherwise the test framework can't work properly
+ for (String nodeId : nodes) {
+ if (!nodeId.equals(unluckyNode)) {
+ clearNoConnectRule(nodeId, unluckyNode);
+ clearNoConnectRule(unluckyNode, nodeId);
+ }
+ }
+ }
+
+ // Wait until the master node sees all 3 nodes again.
+ clusterHealthResponse = masterClient.admin().cluster().prepareHealth()
+ .setWaitForGreenStatus()
+ .setWaitForEvents(Priority.LANGUID)
+ .setWaitForNodes("3")
+ .get();
+ assertThat(clusterHealthResponse.isTimedOut(), is(false));
+
+ for (String node : nodes) {
+ Client client = internalCluster().client(node);
+ searchResponse = client.prepareSearch("test").setTypes("type")
+ .addSort("field", SortOrder.ASC)
+ .get();
+ for (int i = 0; i < searchResponse.getHits().getHits().length; i++) {
+ SearchHit searchHit = searchResponse.getHits().getAt(i);
+ assertThat(searchHit.id(), equalTo(String.valueOf(i)));
+ assertThat((long) searchHit.sortValues()[0], equalTo((long) i));
+ }
+
+
+ GetResponse getResponse = client().prepareGet("test", "type", "0").get();
+ assertThat(getResponse.isExists(), is(true));
+ assertThat(getResponse.getVersion(), equalTo(2l));
+ assertThat(getResponse.getId(), equalTo("0"));
+ for (int i = 1; i < indexRequests.length; i++) {
+ getResponse = client().prepareGet("test", "type", String.valueOf(i)).get();
+ assertThat(getResponse.isExists(), is(true));
+ assertThat(getResponse.getVersion(), equalTo(1l));
+ assertThat(getResponse.getId(), equalTo(String.valueOf(i)));
+ }
+ }
+ }
+
private void addFailToSendNoConnectRule(String fromNode, String toNode) {
TransportService mockTransportService = internalCluster().getInstance(TransportService.class, fromNode);
((MockTransportService) mockTransportService).addFailToSendNoConnectRule(internalCluster().getInstance(Discovery.class, toNode).localNode());
|
ac61b13a7c8284dea58ca4d2a046a44d317ced00
|
spring-framework
|
AnnotatedElementUtils wraps unexpected exceptions- with descriptive IllegalStateException--Issue: SPR-10441-
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-core/src/main/java/org/springframework/core/annotation/AnnotatedElementUtils.java b/spring-core/src/main/java/org/springframework/core/annotation/AnnotatedElementUtils.java
index 8494c0f51d25..78fc584adb46 100644
--- a/spring-core/src/main/java/org/springframework/core/annotation/AnnotatedElementUtils.java
+++ b/spring-core/src/main/java/org/springframework/core/annotation/AnnotatedElementUtils.java
@@ -42,7 +42,6 @@ public class AnnotatedElementUtils {
public static Set<String> getMetaAnnotationTypes(AnnotatedElement element, String annotationType) {
final Set<String> types = new LinkedHashSet<String>();
process(element, annotationType, true, new Processor<Object>() {
-
@Override
public Object process(Annotation annotation, int metaDepth) {
if (metaDepth > 0) {
@@ -50,7 +49,6 @@ public Object process(Annotation annotation, int metaDepth) {
}
return null;
}
-
@Override
public void postProcess(Annotation annotation, Object result) {
}
@@ -60,7 +58,6 @@ public void postProcess(Annotation annotation, Object result) {
public static boolean hasMetaAnnotationTypes(AnnotatedElement element, String annotationType) {
return Boolean.TRUE.equals(process(element, annotationType, true, new Processor<Boolean>() {
-
@Override
public Boolean process(Annotation annotation, int metaDepth) {
if (metaDepth > 0) {
@@ -68,7 +65,6 @@ public Boolean process(Annotation annotation, int metaDepth) {
}
return null;
}
-
@Override
public void postProcess(Annotation annotation, Boolean result) {
}
@@ -77,12 +73,10 @@ public void postProcess(Annotation annotation, Boolean result) {
public static boolean isAnnotated(AnnotatedElement element, String annotationType) {
return Boolean.TRUE.equals(process(element, annotationType, true, new Processor<Boolean>() {
-
@Override
public Boolean process(Annotation annotation, int metaDepth) {
return Boolean.TRUE;
}
-
@Override
public void postProcess(Annotation annotation, Boolean result) {
}
@@ -97,12 +91,10 @@ public static AnnotationAttributes getAnnotationAttributes(AnnotatedElement elem
final boolean classValuesAsString, final boolean nestedAnnotationsAsMap) {
return process(element, annotationType, true, new Processor<AnnotationAttributes>() {
-
@Override
public AnnotationAttributes process(Annotation annotation, int metaDepth) {
return AnnotationUtils.getAnnotationAttributes(annotation, classValuesAsString, nestedAnnotationsAsMap);
}
-
@Override
public void postProcess(Annotation annotation, AnnotationAttributes result) {
for (String key : result.keySet()) {
@@ -117,8 +109,7 @@ public void postProcess(Annotation annotation, AnnotationAttributes result) {
});
}
- public static MultiValueMap<String, Object> getAllAnnotationAttributes(AnnotatedElement element,
- final String annotationType) {
+ public static MultiValueMap<String, Object> getAllAnnotationAttributes(AnnotatedElement element, String annotationType) {
return getAllAnnotationAttributes(element, annotationType, false, false);
}
@@ -127,7 +118,6 @@ public static MultiValueMap<String, Object> getAllAnnotationAttributes(Annotated
final MultiValueMap<String, Object> attributes = new LinkedMultiValueMap<String, Object>();
process(element, annotationType, false, new Processor<Void>() {
-
@Override
public Void process(Annotation annotation, int metaDepth) {
if (annotation.annotationType().getName().equals(annotationType)) {
@@ -138,7 +128,6 @@ public Void process(Annotation annotation, int metaDepth) {
}
return null;
}
-
@Override
public void postProcess(Annotation annotation, Void result) {
for (String key : attributes.keySet()) {
@@ -157,12 +146,10 @@ public void postProcess(Annotation annotation, Void result) {
/**
* Process all annotations of the specified {@code annotationType} and
* recursively all meta-annotations on the specified {@code element}.
- *
* <p>If the {@code traverseClassHierarchy} flag is {@code true} and the sought
* annotation is neither <em>directly present</em> on the given element nor
* present on the given element as a meta-annotation, then the algorithm will
* recursively search through the class hierarchy of the given element.
- *
* @param element the annotated element
* @param annotationType the annotation type to find
* @param traverseClassHierarchy whether or not to traverse up the class
@@ -172,19 +159,23 @@ public void postProcess(Annotation annotation, Void result) {
*/
private static <T> T process(AnnotatedElement element, String annotationType, boolean traverseClassHierarchy,
Processor<T> processor) {
- return doProcess(element, annotationType, traverseClassHierarchy, processor, new HashSet<AnnotatedElement>(), 0);
+
+ try {
+ return doProcess(element, annotationType, traverseClassHierarchy, processor, new HashSet<AnnotatedElement>(), 0);
+ }
+ catch (Throwable ex) {
+ throw new IllegalStateException("Failed to introspect annotations: " + element, ex);
+ }
}
/**
* Perform the search algorithm for the {@link #process} method, avoiding
* endless recursion by tracking which annotated elements have already been
* <em>visited</em>.
- *
* <p>The {@code metaDepth} parameter represents the depth of the annotation
* relative to the initial element. For example, an annotation that is
* <em>present</em> on the element will have a depth of 0; a meta-annotation
* will have a depth of 1; and a meta-meta-annotation will have a depth of 2.
- *
* @param element the annotated element
* @param annotationType the annotation type to find
* @param traverseClassHierarchy whether or not to traverse up the class
@@ -198,10 +189,8 @@ private static <T> T doProcess(AnnotatedElement element, String annotationType,
Processor<T> processor, Set<AnnotatedElement> visited, int metaDepth) {
if (visited.add(element)) {
-
- Annotation[] annotations = traverseClassHierarchy ? element.getDeclaredAnnotations()
- : element.getAnnotations();
-
+ Annotation[] annotations =
+ (traverseClassHierarchy ? element.getDeclaredAnnotations() : element.getAnnotations());
for (Annotation annotation : annotations) {
if (annotation.annotationType().getName().equals(annotationType) || metaDepth > 0) {
T result = processor.process(annotation, metaDepth);
@@ -216,7 +205,6 @@ private static <T> T doProcess(AnnotatedElement element, String annotationType,
}
}
}
-
for (Annotation annotation : annotations) {
if (!isInJavaLangAnnotationPackage(annotation)) {
T result = doProcess(annotation.annotationType(), annotationType, traverseClassHierarchy,
@@ -227,7 +215,6 @@ private static <T> T doProcess(AnnotatedElement element, String annotationType,
}
}
}
-
if (traverseClassHierarchy && element instanceof Class) {
Class<?> superclass = ((Class<?>) element).getSuperclass();
if (superclass != null && !superclass.equals(Object.class)) {
@@ -239,7 +226,6 @@ private static <T> T doProcess(AnnotatedElement element, String annotationType,
}
}
}
-
return null;
}
@@ -252,13 +238,11 @@ private static interface Processor<T> {
/**
* Called to process the annotation.
- *
* <p>The {@code metaDepth} parameter represents the depth of the
* annotation relative to the initial element. For example, an annotation
* that is <em>present</em> on the element will have a depth of 0; a
* meta-annotation will have a depth of 1; and a meta-meta-annotation
* will have a depth of 2.
- *
* @param annotation the annotation to process
* @param metaDepth the depth of the annotation relative to the initial element
* @return the result of the processing or {@code null} to continue
|
aa6d81a239cf699410b26517abba69a650b5ba42
|
intellij-community
|
SMTestRunner: Statistics tab, total column- caption improved--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/sm/runner/ui/statistics/ColumnTest.java b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/sm/runner/ui/statistics/ColumnTest.java
index 812162ec8d655..83f9aadbb4b3a 100644
--- a/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/sm/runner/ui/statistics/ColumnTest.java
+++ b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/sm/runner/ui/statistics/ColumnTest.java
@@ -61,6 +61,9 @@ public void customizeCellRenderer(final JTable table,
//Black bold for with caption "Total" for parent suite of items in statistics
if (myProxy.isSuite() && isFirstLine(row)) {
append(TOTAL_TITLE, SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES);
+ if (myProxy.getParent() != null) {
+ append(" (" + myProxy.getName() + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
+ }
return;
}
//Black, regular for other suites and tests
diff --git a/plugins/ruby/testSrc/org/jetbrains/plugins/ruby/testing/sm/runner/ui/statistics/ColumnTestTest.java b/plugins/ruby/testSrc/org/jetbrains/plugins/ruby/testing/sm/runner/ui/statistics/ColumnTestTest.java
index 8af5dfc625fbf..85e981a05afd1 100644
--- a/plugins/ruby/testSrc/org/jetbrains/plugins/ruby/testing/sm/runner/ui/statistics/ColumnTestTest.java
+++ b/plugins/ruby/testSrc/org/jetbrains/plugins/ruby/testing/sm/runner/ui/statistics/ColumnTestTest.java
@@ -68,7 +68,21 @@ public void testTotal_RegularSuite() {
assertEquals(mySuite.getPresentableName(), myFragmentsContainer.getTextAt(0));
}
- public void testTotal_TotalSuite() {
+ public void testTotal_TotalNotRootSuite() {
+ // pre condition
+ assertEquals("suite", mySuite.getName());
+
+ final SMTestProxy newRootSuite = createSuiteProxy("root_suite");
+ mySuite.setParent(newRootSuite);
+ doRender(mySuite, 0);
+ assertFragmentsSize(2);
+ assertEquals(SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES, myFragmentsContainer.getAttribsAt(0));
+ assertEquals("Total:", myFragmentsContainer.getTextAt(0));
+ assertEquals(SimpleTextAttributes.GRAYED_ATTRIBUTES, myFragmentsContainer.getAttribsAt(1));
+ assertEquals(" (suite)", myFragmentsContainer.getTextAt(1));
+ }
+
+ public void testTotal_TotalRootSuite() {
doRender(mySuite, 0);
assertFragmentsSize(1);
assertEquals(SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES, myFragmentsContainer.getAttribsAt(0));
|
dc9563874163393853426a050003de459a86f048
|
kotlin
|
Method cache for ClassDescriptorSerializer--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/tests/org/jetbrains/jet/test/util/NamespaceComparator.java b/compiler/tests/org/jetbrains/jet/test/util/NamespaceComparator.java
index 0f7ad6edcbf3f..96435ae1283ad 100644
--- a/compiler/tests/org/jetbrains/jet/test/util/NamespaceComparator.java
+++ b/compiler/tests/org/jetbrains/jet/test/util/NamespaceComparator.java
@@ -728,12 +728,18 @@ public void serialize(ClassDescriptor klass) {
}
}
+ private static final MethodCache CLASS_OBJECT_SERIALIZER_METHOD_CACHE = new MethodCache(ClassObjectSerializer.class);
private class ClassObjectSerializer extends FullContentSerialier {
private ClassObjectSerializer(StringBuilder sb) {
super(sb);
}
+ @Override
+ protected MethodCache doGetMethodCache() {
+ return CLASS_OBJECT_SERIALIZER_METHOD_CACHE;
+ }
+
@Override
public void serialize(ClassKind kind) {
assert kind == ClassKind.OBJECT : "Must be called for class objects only";
|
3990e8b478b1d958479c173c74946e38360cfd17
|
hadoop
|
Merge r1503933 from trunk to branch-2 for YARN-513.- Create common proxy client for communicating with RM (Xuan Gong & Jian He via- bikas)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1503935 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 65d19bff9d839..4d6cb00b23eca 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -465,6 +465,9 @@ Release 2.1.0-beta - 2013-07-02
YARN-521. Augment AM - RM client module to be able to request containers
only at specific locations (Sandy Ryza via bikas)
+ YARN-513. Create common proxy client for communicating with RM. (Xuan Gong
+ & Jian He via bikas)
+
OPTIMIZATIONS
YARN-512. Log aggregation root directory check is more expensive than it
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 44c35c3d58b28..b14e65225200d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -655,17 +655,17 @@ public class YarnConfiguration extends Configuration {
public static final long DEFAULT_NM_PROCESS_KILL_WAIT_MS =
2000;
- /** Max time to wait to establish a connection to RM when NM starts
+ /** Max time to wait to establish a connection to RM
*/
- public static final String RESOURCEMANAGER_CONNECT_WAIT_SECS =
- NM_PREFIX + "resourcemanager.connect.wait.secs";
- public static final int DEFAULT_RESOURCEMANAGER_CONNECT_WAIT_SECS =
+ public static final String RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS =
+ RM_PREFIX + "resourcemanager.connect.max.wait.secs";
+ public static final int DEFAULT_RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS =
15*60;
- /** Time interval between each NM attempt to connect to RM
+ /** Time interval between each attempt to connect to RM
*/
public static final String RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS =
- NM_PREFIX + "resourcemanager.connect.retry_interval.secs";
+ RM_PREFIX + "resourcemanager.connect.retry_interval.secs";
public static final long DEFAULT_RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS
= 30;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/ClientRMProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/ClientRMProxy.java
new file mode 100644
index 0000000000000..f70b44ce3a8db
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/ClientRMProxy.java
@@ -0,0 +1,65 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.client;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
+import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol;
+
+public class ClientRMProxy<T> extends RMProxy<T>{
+
+ private static final Log LOG = LogFactory.getLog(ClientRMProxy.class);
+
+ public static <T> T createRMProxy(final Configuration conf,
+ final Class<T> protocol) throws IOException {
+ InetSocketAddress rmAddress = getRMAddress(conf, protocol);
+ return createRMProxy(conf, protocol, rmAddress);
+ }
+
+ private static InetSocketAddress getRMAddress(Configuration conf, Class<?> protocol) {
+ if (protocol == ApplicationClientProtocol.class) {
+ return conf.getSocketAddr(YarnConfiguration.RM_ADDRESS,
+ YarnConfiguration.DEFAULT_RM_ADDRESS,
+ YarnConfiguration.DEFAULT_RM_PORT);
+ } else if (protocol == ResourceManagerAdministrationProtocol.class) {
+ return conf.getSocketAddr(
+ YarnConfiguration.RM_ADMIN_ADDRESS,
+ YarnConfiguration.DEFAULT_RM_ADMIN_ADDRESS,
+ YarnConfiguration.DEFAULT_RM_ADMIN_PORT);
+ } else if (protocol == ApplicationMasterProtocol.class) {
+ return conf.getSocketAddr(
+ YarnConfiguration.RM_SCHEDULER_ADDRESS,
+ YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS,
+ YarnConfiguration.DEFAULT_RM_SCHEDULER_PORT);
+ } else {
+ String message = "Unsupported protocol found when creating the proxy " +
+ "connection to ResourceManager: " +
+ ((protocol != null) ? protocol.getClass().getName() : "null");
+ LOG.error(message);
+ throw new IllegalStateException(message);
+ }
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java
index e8dca61d32a0b..22d80c6e8d90b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.yarn.client.api;
import java.io.IOException;
-import java.net.InetSocketAddress;
import java.util.List;
import java.util.Set;
@@ -54,25 +53,6 @@ public static YarnClient createYarnClient() {
return client;
}
- /**
- * Create a new instance of YarnClient.
- */
- @Public
- public static YarnClient createYarnClient(InetSocketAddress rmAddress) {
- YarnClient client = new YarnClientImpl(rmAddress);
- return client;
- }
-
- /**
- * Create a new instance of YarnClient.
- */
- @Public
- public static YarnClient createYarnClient(String name,
- InetSocketAddress rmAddress) {
- YarnClient client = new YarnClientImpl(name, rmAddress);
- return client;
- }
-
@Private
protected YarnClient(String name) {
super(name);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java
index 0f088a0604b6e..4119a0cb1de7e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/AMRMClientImpl.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.yarn.client.api.impl;
import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -42,7 +40,6 @@
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
@@ -56,16 +53,16 @@
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
+import org.apache.hadoop.yarn.client.ClientRMProxy;
import org.apache.hadoop.yarn.client.api.AMRMClient;
+import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
import org.apache.hadoop.yarn.client.api.InvalidContainerRequestException;
import org.apache.hadoop.yarn.client.api.NMTokenCache;
-import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.util.RackResolver;
import com.google.common.annotations.VisibleForTesting;
@@ -171,28 +168,11 @@ protected void serviceInit(Configuration conf) throws Exception {
@Override
protected void serviceStart() throws Exception {
final YarnConfiguration conf = new YarnConfiguration(getConfig());
- final YarnRPC rpc = YarnRPC.create(conf);
- final InetSocketAddress rmAddress = conf.getSocketAddr(
- YarnConfiguration.RM_SCHEDULER_ADDRESS,
- YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS,
- YarnConfiguration.DEFAULT_RM_SCHEDULER_PORT);
-
- UserGroupInformation currentUser;
try {
- currentUser = UserGroupInformation.getCurrentUser();
+ rmClient = ClientRMProxy.createRMProxy(conf, ApplicationMasterProtocol.class);
} catch (IOException e) {
throw new YarnRuntimeException(e);
}
-
- // CurrentUser should already have AMToken loaded.
- rmClient = currentUser.doAs(new PrivilegedAction<ApplicationMasterProtocol>() {
- @Override
- public ApplicationMasterProtocol run() {
- return (ApplicationMasterProtocol) rpc.getProxy(ApplicationMasterProtocol.class, rmAddress,
- conf);
- }
- });
- LOG.debug("Connecting to ResourceManager at " + rmAddress);
super.serviceStart();
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java
index b3b8bdf4316bb..4398359862b06 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java
@@ -59,11 +59,12 @@
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
+import org.apache.hadoop.yarn.client.ClientRMProxy;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.client.api.YarnClientApplication;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.ipc.YarnRPC;
+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.util.Records;
import com.google.common.annotations.VisibleForTesting;
@@ -81,16 +82,7 @@ public class YarnClientImpl extends YarnClient {
private static final String ROOT = "root";
public YarnClientImpl() {
- this(null);
- }
-
- public YarnClientImpl(InetSocketAddress rmAddress) {
- this(YarnClientImpl.class.getName(), rmAddress);
- }
-
- public YarnClientImpl(String name, InetSocketAddress rmAddress) {
- super(name);
- this.rmAddress = rmAddress;
+ super(YarnClientImpl.class.getName());
}
private static InetSocketAddress getRmAddress(Configuration conf) {
@@ -100,9 +92,7 @@ private static InetSocketAddress getRmAddress(Configuration conf) {
@Override
protected void serviceInit(Configuration conf) throws Exception {
- if (this.rmAddress == null) {
- this.rmAddress = getRmAddress(conf);
- }
+ this.rmAddress = getRmAddress(conf);
statePollIntervalMillis = conf.getLong(
YarnConfiguration.YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS,
YarnConfiguration.DEFAULT_YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS);
@@ -111,12 +101,11 @@ protected void serviceInit(Configuration conf) throws Exception {
@Override
protected void serviceStart() throws Exception {
- YarnRPC rpc = YarnRPC.create(getConfig());
-
- this.rmClient = (ApplicationClientProtocol) rpc.getProxy(
- ApplicationClientProtocol.class, rmAddress, getConfig());
- if (LOG.isDebugEnabled()) {
- LOG.debug("Connecting to ResourceManager at " + rmAddress);
+ try {
+ rmClient = ClientRMProxy.createRMProxy(getConfig(),
+ ApplicationClientProtocol.class);
+ } catch (IOException e) {
+ throw new YarnRuntimeException(e);
}
super.serviceStart();
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java
index 6426fe9dbc77e..11335c0d8f68d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java
@@ -19,8 +19,6 @@
package org.apache.hadoop.yarn.client.cli;
import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.security.PrivilegedAction;
import java.util.Arrays;
import org.apache.hadoop.classification.InterfaceAudience.Private;
@@ -31,11 +29,11 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.yarn.client.ClientRMProxy;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshAdminAclsRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshNodesRequest;
@@ -164,32 +162,10 @@ private static void printUsage(String cmd) {
}
}
- private static UserGroupInformation getUGI(Configuration conf
- ) throws IOException {
- return UserGroupInformation.getCurrentUser();
- }
-
private ResourceManagerAdministrationProtocol createAdminProtocol() throws IOException {
// Get the current configuration
final YarnConfiguration conf = new YarnConfiguration(getConf());
-
- // Create the client
- final InetSocketAddress addr = conf.getSocketAddr(
- YarnConfiguration.RM_ADMIN_ADDRESS,
- YarnConfiguration.DEFAULT_RM_ADMIN_ADDRESS,
- YarnConfiguration.DEFAULT_RM_ADMIN_PORT);
- final YarnRPC rpc = YarnRPC.create(conf);
-
- ResourceManagerAdministrationProtocol adminProtocol =
- getUGI(conf).doAs(new PrivilegedAction<ResourceManagerAdministrationProtocol>() {
- @Override
- public ResourceManagerAdministrationProtocol run() {
- return (ResourceManagerAdministrationProtocol) rpc.getProxy(ResourceManagerAdministrationProtocol.class,
- addr, conf);
- }
- });
-
- return adminProtocol;
+ return ClientRMProxy.createRMProxy(conf, ResourceManagerAdministrationProtocol.class);
}
private int refreshQueues() throws IOException, YarnException {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RMProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RMProxy.java
new file mode 100644
index 0000000000000..e4493b5a469b9
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/RMProxy.java
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.client;
+
+import java.io.IOException;
+import java.net.ConnectException;
+import java.net.InetSocketAddress;
+import java.security.PrivilegedAction;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.retry.RetryPolicies;
+import org.apache.hadoop.io.retry.RetryPolicy;
+import org.apache.hadoop.io.retry.RetryProxy;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
+import org.apache.hadoop.yarn.ipc.YarnRPC;
+
[email protected]
[email protected]
+public class RMProxy<T> {
+
+ private static final Log LOG = LogFactory.getLog(RMProxy.class);
+
+ @SuppressWarnings("unchecked")
+ public static <T> T createRMProxy(final Configuration conf,
+ final Class<T> protocol, InetSocketAddress rmAddress) throws IOException {
+ RetryPolicy retryPolicy = createRetryPolicy(conf);
+ T proxy = RMProxy.<T>getProxy(conf, protocol, rmAddress);
+ LOG.info("Connecting to ResourceManager at " + rmAddress);
+ return (T) RetryProxy.create(protocol, proxy, retryPolicy);
+ }
+
+ @SuppressWarnings("unchecked")
+ protected static <T> T getProxy(final Configuration conf,
+ final Class<T> protocol, final InetSocketAddress rmAddress)
+ throws IOException {
+ return (T) UserGroupInformation.getCurrentUser().doAs(
+ new PrivilegedAction<Object>() {
+
+ @Override
+ public T run() {
+ return (T) YarnRPC.create(conf).getProxy(protocol, rmAddress, conf);
+ }
+ });
+ }
+
+ public static RetryPolicy createRetryPolicy(Configuration conf) {
+ long rmConnectWaitMS =
+ conf.getInt(
+ YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS,
+ YarnConfiguration.DEFAULT_RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS)
+ * 1000;
+ long rmConnectionRetryIntervalMS =
+ conf.getLong(
+ YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS,
+ YarnConfiguration
+ .DEFAULT_RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS)
+ * 1000;
+
+ if (rmConnectionRetryIntervalMS < 0) {
+ throw new YarnRuntimeException("Invalid Configuration. " +
+ YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS +
+ " should not be negative.");
+ }
+
+ boolean waitForEver = (rmConnectWaitMS == -1000);
+
+ if (waitForEver) {
+ return RetryPolicies.RETRY_FOREVER;
+ } else {
+ if (rmConnectWaitMS < 0) {
+ throw new YarnRuntimeException("Invalid Configuration. "
+ + YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS
+ + " can be -1, but can not be other negative numbers");
+ }
+
+ // try connect once
+ if (rmConnectWaitMS < rmConnectionRetryIntervalMS) {
+ LOG.warn(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS
+ + " is smaller than "
+ + YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS
+ + ". Only try connect once.");
+ rmConnectWaitMS = 0;
+ }
+ }
+
+ RetryPolicy retryPolicy =
+ RetryPolicies.retryUpToMaximumTimeWithFixedSleep(rmConnectWaitMS,
+ rmConnectionRetryIntervalMS,
+ TimeUnit.MILLISECONDS);
+
+ Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap =
+ new HashMap<Class<? extends Exception>, RetryPolicy>();
+ exceptionToPolicyMap.put(ConnectException.class, retryPolicy);
+ //TO DO: after HADOOP-9576, IOException can be changed to EOFException
+ exceptionToPolicyMap.put(IOException.class, retryPolicy);
+
+ return RetryPolicies.retryByException(RetryPolicies.TRY_ONCE_THEN_FAIL,
+ exceptionToPolicyMap);
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ServerRMProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ServerRMProxy.java
new file mode 100644
index 0000000000000..ef9154fde1b5f
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ServerRMProxy.java
@@ -0,0 +1,55 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.server.api;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.client.RMProxy;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+
+public class ServerRMProxy<T> extends RMProxy<T>{
+
+ private static final Log LOG = LogFactory.getLog(ServerRMProxy.class);
+
+ public static <T> T createRMProxy(final Configuration conf,
+ final Class<T> protocol) throws IOException {
+ InetSocketAddress rmAddress = getRMAddress(conf, protocol);
+ return createRMProxy(conf, protocol, rmAddress);
+ }
+
+ private static InetSocketAddress getRMAddress(Configuration conf, Class<?> protocol) {
+ if (protocol == ResourceTracker.class) {
+ return conf.getSocketAddr(
+ YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
+ YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
+ YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT);
+ }
+ else {
+ String message = "Unsupported protocol found when creating the proxy " +
+ "connection to ResourceManager: " +
+ ((protocol != null) ? protocol.getClass().getName() : "null");
+ LOG.error(message);
+ throw new IllegalStateException(message);
+ }
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java
index 396204cf2dbdb..40f6874623fdf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.api.impl.pb.client;
+import java.io.Closeable;
import java.io.IOException;
import java.net.InetSocketAddress;
@@ -41,7 +42,7 @@
import com.google.protobuf.ServiceException;
-public class ResourceTrackerPBClientImpl implements ResourceTracker {
+public class ResourceTrackerPBClientImpl implements ResourceTracker, Closeable {
private ResourceTrackerPB proxy;
@@ -50,7 +51,14 @@ public ResourceTrackerPBClientImpl(long clientVersion, InetSocketAddress addr, C
proxy = (ResourceTrackerPB)RPC.getProxy(
ResourceTrackerPB.class, clientVersion, addr, conf);
}
-
+
+ @Override
+ public void close() {
+ if(this.proxy != null) {
+ RPC.stopProxy(this.proxy);
+ }
+ }
+
@Override
public RegisterNodeManagerResponse registerNodeManager(
RegisterNodeManagerRequest request) throws YarnException,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java
index 550cdc5a98f4f..b0e71e915633e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java
@@ -19,7 +19,7 @@
package org.apache.hadoop.yarn.server.nodemanager;
import java.io.IOException;
-import java.net.InetSocketAddress;
+import java.net.ConnectException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -33,6 +33,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -47,9 +48,9 @@
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.server.api.ResourceManagerConstants;
import org.apache.hadoop.yarn.server.api.ResourceTracker;
+import org.apache.hadoop.yarn.server.api.ServerRMProxy;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest;
@@ -77,7 +78,6 @@ public class NodeStatusUpdaterImpl extends AbstractService implements
private NodeId nodeId;
private long nextHeartBeatInterval;
private ResourceTracker resourceTracker;
- private InetSocketAddress rmAddress;
private Resource totalResource;
private int httpPort;
private volatile boolean isStopped;
@@ -91,9 +91,6 @@ public class NodeStatusUpdaterImpl extends AbstractService implements
private final NodeHealthCheckerService healthChecker;
private final NodeManagerMetrics metrics;
- private long rmConnectWaitMS;
- private long rmConnectionRetryIntervalMS;
- private boolean waitForEver;
private Runnable statusUpdaterRunnable;
private Thread statusUpdater;
@@ -110,11 +107,6 @@ public NodeStatusUpdaterImpl(Context context, Dispatcher dispatcher,
@Override
protected void serviceInit(Configuration conf) throws Exception {
- this.rmAddress = conf.getSocketAddr(
- YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
- YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
- YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT);
-
int memoryMb =
conf.getInt(
YarnConfiguration.NM_PMEM_MB, YarnConfiguration.DEFAULT_NM_PMEM_MB);
@@ -153,6 +145,7 @@ protected void serviceStart() throws Exception {
try {
// Registration has to be in start so that ContainerManager can get the
// perNM tokens needed to authenticate ContainerTokens.
+ this.resourceTracker = getRMClient();
registerWithRM();
super.serviceStart();
startStatusUpdater();
@@ -167,6 +160,7 @@ protected void serviceStart() throws Exception {
protected void serviceStop() throws Exception {
// Interrupt the updater.
this.isStopped = true;
+ stopRMProxy();
super.serviceStop();
}
@@ -188,6 +182,13 @@ protected void rebootNodeStatusUpdater() {
}
}
+ @VisibleForTesting
+ protected void stopRMProxy() {
+ if(this.resourceTracker != null) {
+ RPC.stopProxy(this.resourceTracker);
+ }
+ }
+
@Private
protected boolean isTokenKeepAliveEnabled(Configuration conf) {
return conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
@@ -195,93 +196,22 @@ protected boolean isTokenKeepAliveEnabled(Configuration conf) {
&& UserGroupInformation.isSecurityEnabled();
}
- protected ResourceTracker getRMClient() {
+ @VisibleForTesting
+ protected ResourceTracker getRMClient() throws IOException {
Configuration conf = getConfig();
- YarnRPC rpc = YarnRPC.create(conf);
- return (ResourceTracker) rpc.getProxy(ResourceTracker.class, rmAddress,
- conf);
+ return ServerRMProxy.createRMProxy(conf, ResourceTracker.class);
}
@VisibleForTesting
protected void registerWithRM() throws YarnException, IOException {
- Configuration conf = getConfig();
- rmConnectWaitMS =
- conf.getInt(
- YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS,
- YarnConfiguration.DEFAULT_RESOURCEMANAGER_CONNECT_WAIT_SECS)
- * 1000;
- rmConnectionRetryIntervalMS =
- conf.getLong(
- YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS,
- YarnConfiguration
- .DEFAULT_RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS)
- * 1000;
-
- if(rmConnectionRetryIntervalMS < 0) {
- throw new YarnRuntimeException("Invalid Configuration. " +
- YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS +
- " should not be negative.");
- }
-
- waitForEver = (rmConnectWaitMS == -1000);
-
- if(! waitForEver) {
- if(rmConnectWaitMS < 0) {
- throw new YarnRuntimeException("Invalid Configuration. " +
- YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS +
- " can be -1, but can not be other negative numbers");
- }
-
- //try connect once
- if(rmConnectWaitMS < rmConnectionRetryIntervalMS) {
- LOG.warn(YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS
- + " is smaller than "
- + YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS
- + ". Only try connect once.");
- rmConnectWaitMS = 0;
- }
- }
-
- int rmRetryCount = 0;
- long waitStartTime = System.currentTimeMillis();
-
RegisterNodeManagerRequest request =
recordFactory.newRecordInstance(RegisterNodeManagerRequest.class);
request.setHttpPort(this.httpPort);
request.setResource(this.totalResource);
request.setNodeId(this.nodeId);
- RegisterNodeManagerResponse regNMResponse;
-
- while(true) {
- try {
- rmRetryCount++;
- LOG.info("Connecting to ResourceManager at " + this.rmAddress
- + ". current no. of attempts is " + rmRetryCount);
- this.resourceTracker = getRMClient();
- regNMResponse =
- this.resourceTracker.registerNodeManager(request);
- this.rmIdentifier = regNMResponse.getRMIdentifier();
- break;
- } catch(Throwable e) {
- LOG.warn("Trying to connect to ResourceManager, " +
- "current no. of failed attempts is "+rmRetryCount);
- if(System.currentTimeMillis() - waitStartTime < rmConnectWaitMS
- || waitForEver) {
- try {
- LOG.info("Sleeping for " + rmConnectionRetryIntervalMS/1000
- + " seconds before next connection retry to RM");
- Thread.sleep(rmConnectionRetryIntervalMS);
- } catch(InterruptedException ex) {
- //done nothing
- }
- } else {
- String errorMessage = "Failed to Connect to RM, " +
- "no. of failed attempts is "+rmRetryCount;
- LOG.error(errorMessage,e);
- throw new YarnRuntimeException(errorMessage,e);
- }
- }
- }
+ RegisterNodeManagerResponse regNMResponse =
+ resourceTracker.registerNodeManager(request);
+ this.rmIdentifier = regNMResponse.getRMIdentifier();
// if the Resourcemanager instructs NM to shutdown.
if (NodeAction.SHUTDOWN.equals(regNMResponse.getNodeAction())) {
String message =
@@ -426,8 +356,6 @@ public void run() {
// Send heartbeat
try {
NodeHeartbeatResponse response = null;
- int rmRetryCount = 0;
- long waitStartTime = System.currentTimeMillis();
NodeStatus nodeStatus = getNodeStatusAndUpdateContainersInContext();
nodeStatus.setResponseId(lastHeartBeatID);
@@ -440,31 +368,7 @@ public void run() {
request
.setLastKnownNMTokenMasterKey(NodeStatusUpdaterImpl.this.context
.getNMTokenSecretManager().getCurrentKey());
- while (!isStopped) {
- try {
- rmRetryCount++;
- response = resourceTracker.nodeHeartbeat(request);
- break;
- } catch (Throwable e) {
- LOG.warn("Trying to heartbeat to ResourceManager, "
- + "current no. of failed attempts is " + rmRetryCount);
- if(System.currentTimeMillis() - waitStartTime < rmConnectWaitMS
- || waitForEver) {
- try {
- LOG.info("Sleeping for " + rmConnectionRetryIntervalMS/1000
- + " seconds before next heartbeat to RM");
- Thread.sleep(rmConnectionRetryIntervalMS);
- } catch(InterruptedException ex) {
- //done nothing
- }
- } else {
- String errorMessage = "Failed to heartbeat to RM, " +
- "no. of failed attempts is "+rmRetryCount;
- LOG.error(errorMessage,e);
- throw new YarnRuntimeException(errorMessage,e);
- }
- }
- }
+ response = resourceTracker.nodeHeartbeat(request);
//get next heartbeat interval from response
nextHeartBeatInterval = response.getNextHeartBeatInterval();
updateMasterKeys(response);
@@ -508,11 +412,11 @@ public void run() {
dispatcher.getEventHandler().handle(
new CMgrCompletedAppsEvent(appsToCleanup));
}
- } catch (YarnRuntimeException e) {
+ } catch (ConnectException e) {
//catch and throw the exception if tried MAX wait time to connect RM
dispatcher.getEventHandler().handle(
new NodeManagerEvent(NodeManagerEventType.SHUTDOWN));
- throw e;
+ throw new YarnRuntimeException(e);
} catch (Throwable e) {
// TODO Better error handling. Thread can die with the rest of the
// NM still running.
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java
index e93778e2987ef..a3e1faf310e54 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java
@@ -61,6 +61,10 @@ public MockNodeStatusUpdater(Context context, Dispatcher dispatcher,
protected ResourceTracker getRMClient() {
return resourceTracker;
}
+ @Override
+ protected void stopRMProxy() {
+ return;
+ }
private static class MockResourceTracker implements ResourceTracker {
private int heartBeatID;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java
index 668b85b6511bd..294c93ed3b84a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java
@@ -107,6 +107,11 @@ protected ResourceTracker getRMClient() {
return new LocalRMInterface();
};
+ @Override
+ protected void stopRMProxy() {
+ return;
+ }
+
@Override
protected void startStatusUpdater() {
return; // Don't start any updating thread.
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java
index e17131fd3a1dc..2a3e3d579ca03 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java
@@ -41,6 +41,8 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.retry.RetryPolicy;
+import org.apache.hadoop.io.retry.RetryProxy;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.service.ServiceOperations;
@@ -53,6 +55,7 @@
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.client.RMProxy;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
@@ -60,9 +63,9 @@
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.api.ResourceTracker;
+import org.apache.hadoop.yarn.server.api.ServerRMProxy;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest;
@@ -103,11 +106,17 @@ public class TestNodeStatusUpdater {
volatile int heartBeatID = 0;
volatile Throwable nmStartError = null;
private final List<NodeId> registeredNodes = new ArrayList<NodeId>();
- private final Configuration conf = createNMConfig();
+ private boolean triggered = false;
+ private Configuration conf;
private NodeManager nm;
private boolean containerStatusBackupSuccessfully = true;
private List<ContainerStatus> completedContainerStatusList = new ArrayList<ContainerStatus>();
+ @Before
+ public void setUp() {
+ conf = createNMConfig();
+ }
+
@After
public void tearDown() {
this.registeredNodes.clear();
@@ -274,6 +283,11 @@ public MyNodeStatusUpdater(Context context, Dispatcher dispatcher,
protected ResourceTracker getRMClient() {
return resourceTracker;
}
+
+ @Override
+ protected void stopRMProxy() {
+ return;
+ }
}
private class MyNodeStatusUpdater2 extends NodeStatusUpdaterImpl {
@@ -290,6 +304,10 @@ protected ResourceTracker getRMClient() {
return resourceTracker;
}
+ @Override
+ protected void stopRMProxy() {
+ return;
+ }
}
private class MyNodeStatusUpdater3 extends NodeStatusUpdaterImpl {
@@ -307,7 +325,12 @@ public MyNodeStatusUpdater3(Context context, Dispatcher dispatcher,
protected ResourceTracker getRMClient() {
return resourceTracker;
}
-
+
+ @Override
+ protected void stopRMProxy() {
+ return;
+ }
+
@Override
protected boolean isTokenKeepAliveEnabled(Configuration conf) {
return true;
@@ -315,21 +338,16 @@ protected boolean isTokenKeepAliveEnabled(Configuration conf) {
}
private class MyNodeStatusUpdater4 extends NodeStatusUpdaterImpl {
- public ResourceTracker resourceTracker =
- new MyResourceTracker(this.context);
+
private Context context;
- private long waitStartTime;
private final long rmStartIntervalMS;
private final boolean rmNeverStart;
- private volatile boolean triggered = false;
- private long durationWhenTriggered = -1;
-
+ public ResourceTracker resourceTracker;
public MyNodeStatusUpdater4(Context context, Dispatcher dispatcher,
NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics,
long rmStartIntervalMS, boolean rmNeverStart) {
super(context, dispatcher, healthChecker, metrics);
this.context = context;
- this.waitStartTime = System.currentTimeMillis();
this.rmStartIntervalMS = rmStartIntervalMS;
this.rmNeverStart = rmNeverStart;
}
@@ -337,25 +355,16 @@ public MyNodeStatusUpdater4(Context context, Dispatcher dispatcher,
@Override
protected void serviceStart() throws Exception {
//record the startup time
- this.waitStartTime = System.currentTimeMillis();
super.serviceStart();
}
@Override
- protected ResourceTracker getRMClient() {
- if (!triggered) {
- long t = System.currentTimeMillis();
- long duration = t - waitStartTime;
- if (duration <= rmStartIntervalMS
- || rmNeverStart) {
- throw new YarnRuntimeException("Faking RM start failure as start " +
- "delay timer has not expired.");
- } else {
- //triggering
- triggered = true;
- durationWhenTriggered = duration;
- }
- }
+ protected ResourceTracker getRMClient() throws IOException {
+ RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf);
+ resourceTracker =
+ (ResourceTracker) RetryProxy.create(ResourceTracker.class,
+ new MyResourceTracker6(this.context, rmStartIntervalMS,
+ rmNeverStart), retryPolicy);
return resourceTracker;
}
@@ -363,37 +372,35 @@ private boolean isTriggered() {
return triggered;
}
- private long getWaitStartTime() {
- return waitStartTime;
- }
-
- private long getDurationWhenTriggered() {
- return durationWhenTriggered;
- }
-
@Override
- public String toString() {
- return "MyNodeStatusUpdater4{" +
- "rmNeverStart=" + rmNeverStart +
- ", triggered=" + triggered +
- ", duration=" + durationWhenTriggered +
- ", rmStartIntervalMS=" + rmStartIntervalMS +
- '}';
+ protected void stopRMProxy() {
+ return;
}
}
+
+
private class MyNodeStatusUpdater5 extends NodeStatusUpdaterImpl {
private ResourceTracker resourceTracker;
+ private Configuration conf;
public MyNodeStatusUpdater5(Context context, Dispatcher dispatcher,
- NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics) {
+ NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics, Configuration conf) {
super(context, dispatcher, healthChecker, metrics);
resourceTracker = new MyResourceTracker5();
+ this.conf = conf;
}
@Override
protected ResourceTracker getRMClient() {
- return resourceTracker;
+ RetryPolicy retryPolicy = RMProxy.createRetryPolicy(conf);
+ return (ResourceTracker) RetryProxy.create(ResourceTracker.class,
+ resourceTracker, retryPolicy);
+ }
+
+ @Override
+ protected void stopRMProxy() {
+ return;
}
}
@@ -417,15 +424,18 @@ private class MyNodeManager2 extends NodeManager {
public boolean isStopped = false;
private NodeStatusUpdater nodeStatusUpdater;
private CyclicBarrier syncBarrier;
- public MyNodeManager2 (CyclicBarrier syncBarrier) {
+ private Configuration conf;
+
+ public MyNodeManager2 (CyclicBarrier syncBarrier, Configuration conf) {
this.syncBarrier = syncBarrier;
+ this.conf = conf;
}
@Override
protected NodeStatusUpdater createNodeStatusUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
nodeStatusUpdater =
new MyNodeStatusUpdater5(context, dispatcher, healthChecker,
- metrics);
+ metrics, conf);
return nodeStatusUpdater;
}
@@ -577,7 +587,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
.get(4).getState() == ContainerState.RUNNING
&& request.getNodeStatus().getContainersStatuses().get(4)
.getContainerId().getId() == 5);
- throw new YarnRuntimeException("Lost the heartbeat response");
+ throw new java.net.ConnectException("Lost the heartbeat response");
} else if (heartBeatID == 2) {
Assert.assertEquals(request.getNodeStatus().getContainersStatuses()
.size(), 7);
@@ -646,7 +656,63 @@ public RegisterNodeManagerResponse registerNodeManager(
public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
throws YarnException, IOException {
heartBeatID++;
- throw RPCUtil.getRemoteException("NodeHeartbeat exception");
+ throw new java.net.ConnectException(
+ "NodeHeartbeat exception");
+ }
+ }
+
+ private class MyResourceTracker6 implements ResourceTracker {
+
+ private final Context context;
+ private long rmStartIntervalMS;
+ private boolean rmNeverStart;
+ private final long waitStartTime;
+
+ public MyResourceTracker6(Context context, long rmStartIntervalMS,
+ boolean rmNeverStart) {
+ this.context = context;
+ this.rmStartIntervalMS = rmStartIntervalMS;
+ this.rmNeverStart = rmNeverStart;
+ this.waitStartTime = System.currentTimeMillis();
+ }
+
+ @Override
+ public RegisterNodeManagerResponse registerNodeManager(
+ RegisterNodeManagerRequest request) throws YarnException, IOException,
+ IOException {
+ if (System.currentTimeMillis() - waitStartTime <= rmStartIntervalMS
+ || rmNeverStart) {
+ throw new java.net.ConnectException("Faking RM start failure as start "
+ + "delay timer has not expired.");
+ } else {
+ NodeId nodeId = request.getNodeId();
+ Resource resource = request.getResource();
+ LOG.info("Registering " + nodeId.toString());
+ // NOTE: this really should be checking against the config value
+ InetSocketAddress expected = NetUtils.getConnectAddress(
+ conf.getSocketAddr(YarnConfiguration.NM_ADDRESS, null, -1));
+ Assert.assertEquals(NetUtils.getHostPortString(expected),
+ nodeId.toString());
+ Assert.assertEquals(5 * 1024, resource.getMemory());
+ registeredNodes.add(nodeId);
+
+ RegisterNodeManagerResponse response = recordFactory
+ .newRecordInstance(RegisterNodeManagerResponse.class);
+ triggered = true;
+ return response;
+ }
+ }
+
+ @Override
+ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
+ throws YarnException, IOException {
+ NodeStatus nodeStatus = request.getNodeStatus();
+ nodeStatus.setResponseId(heartBeatID++);
+
+ NodeHeartbeatResponse nhResponse = YarnServerBuilderUtils.
+ newNodeHeartbeatResponse(heartBeatID, NodeAction.NORMAL, null,
+ null, null, null, 1000L);
+ return nhResponse;
}
}
@@ -843,8 +909,7 @@ public void testNMConnectionToRM() throws Exception {
final long connectionRetryIntervalSecs = 1;
//Waiting for rmStartIntervalMS, RM will be started
final long rmStartIntervalMS = 2*1000;
- YarnConfiguration conf = createNMConfig();
- conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS,
+ conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS,
connectionWaitSecs);
conf.setLong(YarnConfiguration
.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS,
@@ -907,8 +972,6 @@ protected NodeStatusUpdater createUpdater(Context context,
}
long duration = System.currentTimeMillis() - waitStartTime;
MyNodeStatusUpdater4 myUpdater = (MyNodeStatusUpdater4) updater;
- Assert.assertTrue("Updater was never started",
- myUpdater.getWaitStartTime()>0);
Assert.assertTrue("NM started before updater triggered",
myUpdater.isTriggered());
Assert.assertTrue("NM should have connected to RM after "
@@ -1037,13 +1100,13 @@ public void testNodeStatusUpdaterRetryAndNMShutdown()
final long connectionWaitSecs = 1;
final long connectionRetryIntervalSecs = 1;
YarnConfiguration conf = createNMConfig();
- conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS,
+ conf.setLong(YarnConfiguration.RESOURCEMANAGER_CONNECT_MAX_WAIT_SECS,
connectionWaitSecs);
conf.setLong(YarnConfiguration
.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS,
connectionRetryIntervalSecs);
CyclicBarrier syncBarrier = new CyclicBarrier(2);
- nm = new MyNodeManager2(syncBarrier);
+ nm = new MyNodeManager2(syncBarrier, conf);
nm.init(conf);
nm.start();
try {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java
index 83d21e1640721..cfcf7f6445e63 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java
@@ -117,6 +117,11 @@ protected ResourceTracker getRMClient() {
return new LocalRMInterface();
};
+ @Override
+ protected void stopRMProxy() {
+ return;
+ }
+
@Override
protected void startStatusUpdater() {
return; // Don't start any updating thread.
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
index cc529739dea79..144b111f83072 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
@@ -390,6 +390,11 @@ public RegisterNodeManagerResponse registerNodeManager(
}
};
};
+
+ @Override
+ protected void stopRMProxy() {
+ return;
+ }
};
};
}
|
6418b54f81a9e56242fb78fda4bf95e7b3d4c572
|
spring-framework
|
DataBinder tries ConversionService if- PropertyEditor could not produce required type--Issue: SPR-13042-
|
a
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-beans/src/main/java/org/springframework/beans/TypeConverterDelegate.java b/spring-beans/src/main/java/org/springframework/beans/TypeConverterDelegate.java
index ff4cfa681afc..149d7ec3e670 100644
--- a/spring-beans/src/main/java/org/springframework/beans/TypeConverterDelegate.java
+++ b/spring-beans/src/main/java/org/springframework/beans/TypeConverterDelegate.java
@@ -159,29 +159,28 @@ public <T> T convertIfNecessary(
public <T> T convertIfNecessary(String propertyName, Object oldValue, Object newValue,
Class<T> requiredType, TypeDescriptor typeDescriptor) throws IllegalArgumentException {
- Object convertedValue = newValue;
-
// Custom editor for this type?
PropertyEditor editor = this.propertyEditorRegistry.findCustomEditor(requiredType, propertyName);
- ConversionFailedException firstAttemptEx = null;
+ ConversionFailedException conversionAttemptEx = null;
// No custom editor but custom ConversionService specified?
ConversionService conversionService = this.propertyEditorRegistry.getConversionService();
- if (editor == null && conversionService != null && convertedValue != null && typeDescriptor != null) {
+ if (editor == null && conversionService != null && newValue != null && typeDescriptor != null) {
TypeDescriptor sourceTypeDesc = TypeDescriptor.forObject(newValue);
- TypeDescriptor targetTypeDesc = typeDescriptor;
- if (conversionService.canConvert(sourceTypeDesc, targetTypeDesc)) {
+ if (conversionService.canConvert(sourceTypeDesc, typeDescriptor)) {
try {
- return (T) conversionService.convert(convertedValue, sourceTypeDesc, targetTypeDesc);
+ return (T) conversionService.convert(newValue, sourceTypeDesc, typeDescriptor);
}
catch (ConversionFailedException ex) {
// fallback to default conversion logic below
- firstAttemptEx = ex;
+ conversionAttemptEx = ex;
}
}
}
+ Object convertedValue = newValue;
+
// Value not of required type?
if (editor != null || (requiredType != null && !ClassUtils.isAssignableValue(requiredType, convertedValue))) {
if (requiredType != null && Collection.class.isAssignableFrom(requiredType) && convertedValue instanceof String) {
@@ -233,7 +232,7 @@ else if (convertedValue instanceof Map) {
return (T) convertedValue.toString();
}
else if (convertedValue instanceof String && !requiredType.isInstance(convertedValue)) {
- if (firstAttemptEx == null && !requiredType.isInterface() && !requiredType.isEnum()) {
+ if (conversionAttemptEx == null && !requiredType.isInterface() && !requiredType.isEnum()) {
try {
Constructor<T> strCtor = requiredType.getConstructor(String.class);
return BeanUtils.instantiateClass(strCtor, convertedValue);
@@ -272,9 +271,19 @@ else if (convertedValue instanceof Number && Number.class.isAssignableFrom(requi
}
if (!ClassUtils.isAssignableValue(requiredType, convertedValue)) {
- if (firstAttemptEx != null) {
- throw firstAttemptEx;
+ if (conversionAttemptEx != null) {
+ // Original exception from former ConversionService call above...
+ throw conversionAttemptEx;
}
+ else if (conversionService != null) {
+ // ConversionService not tried before, probably custom editor found
+ // but editor couldn't produce the required type...
+ TypeDescriptor sourceTypeDesc = TypeDescriptor.forObject(newValue);
+ if (conversionService.canConvert(sourceTypeDesc, typeDescriptor)) {
+ return (T) conversionService.convert(newValue, sourceTypeDesc, typeDescriptor);
+ }
+ }
+
// Definitely doesn't match: throw IllegalArgumentException/IllegalStateException
StringBuilder msg = new StringBuilder();
msg.append("Cannot convert value of type [").append(ClassUtils.getDescriptiveType(newValue));
@@ -295,12 +304,12 @@ else if (convertedValue instanceof Number && Number.class.isAssignableFrom(requi
}
}
- if (firstAttemptEx != null) {
+ if (conversionAttemptEx != null) {
if (editor == null && !standardConversion && requiredType != null && !Object.class.equals(requiredType)) {
- throw firstAttemptEx;
+ throw conversionAttemptEx;
}
logger.debug("Original ConversionService attempt failed - ignored since " +
- "PropertyEditor based conversion eventually succeeded", firstAttemptEx);
+ "PropertyEditor based conversion eventually succeeded", conversionAttemptEx);
}
return (T) convertedValue;
diff --git a/spring-context/src/test/java/org/springframework/validation/DataBinderTests.java b/spring-context/src/test/java/org/springframework/validation/DataBinderTests.java
index 291f8582afec..aecc61995a77 100644
--- a/spring-context/src/test/java/org/springframework/validation/DataBinderTests.java
+++ b/spring-context/src/test/java/org/springframework/validation/DataBinderTests.java
@@ -43,12 +43,15 @@
import org.springframework.beans.NullValueInNestedPathException;
import org.springframework.beans.propertyeditors.CustomCollectionEditor;
import org.springframework.beans.propertyeditors.CustomNumberEditor;
+import org.springframework.beans.propertyeditors.StringTrimmerEditor;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.context.support.ResourceBundleMessageSource;
import org.springframework.context.support.StaticMessageSource;
+import org.springframework.core.convert.converter.Converter;
import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.format.Formatter;
import org.springframework.format.number.NumberStyleFormatter;
+import org.springframework.format.support.DefaultFormattingConversionService;
import org.springframework.format.support.FormattingConversionService;
import org.springframework.tests.sample.beans.BeanWithObjectProperty;
import org.springframework.tests.sample.beans.DerivedTestBean;
@@ -592,6 +595,19 @@ public String print(String object, Locale locale) {
assertEquals("test", binder.getBindingResult().getFieldValue("name"));
}
+ @Test
+ public void testConversionWithInappropriateStringEditor() {
+ DataBinder dataBinder = new DataBinder(null);
+ DefaultFormattingConversionService conversionService = new DefaultFormattingConversionService();
+ dataBinder.setConversionService(conversionService);
+ dataBinder.registerCustomEditor(String.class, new StringTrimmerEditor(true));
+
+ NameBean bean = new NameBean("Fred");
+ assertEquals("ConversionService should have invoked toString()", "Fred", dataBinder.convertIfNecessary(bean, String.class));
+ conversionService.addConverter(new NameBeanConverter());
+ assertEquals("Type converter should have been used", "[Fred]", dataBinder.convertIfNecessary(bean, String.class));
+ }
+
@Test
public void testBindingWithAllowedFields() throws Exception {
TestBean rod = new TestBean();
@@ -2087,4 +2103,30 @@ public Map<String, Object> getF() {
}
}
+
+ public static class NameBean {
+
+ private final String name;
+
+ public NameBean(String name) {
+ this.name = name;
+ }
+ public String getName() {
+ return name;
+ }
+ @Override
+ public String toString() {
+ return name;
+ }
+ }
+
+
+ public static class NameBeanConverter implements Converter<NameBean, String> {
+
+ @Override
+ public String convert(NameBean source) {
+ return "[" + source.getName() + "]";
+ }
+ }
+
}
|
d35b3e7d8b5fd7b509ed5cb78937b29dbe240210
|
ReactiveX-RxJava
|
Fix non-deterministic unit test--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java b/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java
index a92033dcd1..bec93cfdcd 100644
--- a/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java
+++ b/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java
@@ -17,11 +17,17 @@
import static org.junit.Assert.*;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
import org.junit.Test;
import rx.Observable;
+import rx.Observer;
+import rx.Subscription;
+import rx.subscriptions.Subscriptions;
import rx.util.functions.Action1;
import rx.util.functions.Func1;
@@ -181,7 +187,7 @@ public void call(String t) {
}
@Test
- public void testSubscribeWithScheduler1() {
+ public void testSubscribeWithScheduler1() throws InterruptedException {
final AtomicInteger count = new AtomicInteger();
@@ -204,16 +210,39 @@ public void call(Integer t) {
// now we'll subscribe with a scheduler and it should be async
+ final String currentThreadName = Thread.currentThread().getName();
+
+ // latches for deterministically controlling the test below across threads
+ final CountDownLatch latch = new CountDownLatch(5);
+ final CountDownLatch first = new CountDownLatch(1);
+
o1.subscribe(new Action1<Integer>() {
@Override
public void call(Integer t) {
+ try {
+ // we block the first one so we can assert this executes asynchronously with a count
+ first.await(1000, TimeUnit.SECONDS);
+ } catch (InterruptedException e) {
+ throw new RuntimeException("The latch should have released if we are async.", e);
+ }
+ assertFalse(Thread.currentThread().getName().equals(currentThreadName));
+ assertTrue(Thread.currentThread().getName().startsWith("RxComputationThreadPool"));
System.out.println("Thread: " + Thread.currentThread().getName());
System.out.println("t: " + t);
count.incrementAndGet();
+ latch.countDown();
}
}, Schedulers.threadPoolForComputation());
+ // assert we are async
assertEquals(0, count.get());
+ // release the latch so it can go forward
+ first.countDown();
+
+ // wait for all 5 responses
+ latch.await();
+ assertEquals(5, count.get());
}
+
}
|
e49ebd5a789d335363404335055b23a41d07207f
|
camel
|
Removed the System.out.println of the test--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1206115 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/tests/camel-itest/src/test/java/org/apache/camel/itest/jms/DynamicRouteTest.java b/tests/camel-itest/src/test/java/org/apache/camel/itest/jms/DynamicRouteTest.java
index c795fe56bd84b..12670bc0b1999 100644
--- a/tests/camel-itest/src/test/java/org/apache/camel/itest/jms/DynamicRouteTest.java
+++ b/tests/camel-itest/src/test/java/org/apache/camel/itest/jms/DynamicRouteTest.java
@@ -90,7 +90,7 @@ public String bar() {
public static class MyDynamicRouter {
public String route(String methodName, @Header(Exchange.SLIP_ENDPOINT) String previous) {
- System.out.println("method name is " + methodName + " previous " + previous);
+
if (previous != null && previous.startsWith("bean://myBean?method")) {
// we get the result here and stop routing
return null;
|
dee6fa9c5b599abf685527e357ffb7e89494a1e1
|
ReactiveX-RxJava
|
Implemented range operator--
|
a
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/Observable.java b/rxjava-core/src/main/java/rx/Observable.java
index 0fd95c0068..0df09cb3c7 100644
--- a/rxjava-core/src/main/java/rx/Observable.java
+++ b/rxjava-core/src/main/java/rx/Observable.java
@@ -53,6 +53,7 @@
import rx.operators.OperationZip;
import rx.util.AtomicObservableSubscription;
import rx.util.AtomicObserver;
+import rx.util.Range;
import rx.util.functions.Action0;
import rx.util.functions.Action1;
import rx.util.functions.Func1;
@@ -547,6 +548,20 @@ public static <T> Observable<T> from(T... items) {
return toObservable(items);
}
+ /**
+ * Generates an observable sequence of integral numbers within a specified range.
+ *
+ * @param start The value of the first integer in the sequence
+ * @param count The number of sequential integers to generate.
+ *
+ * @return An observable sequence that contains a range of sequential integral numbers.
+ *
+ * @see <a href="http://msdn.microsoft.com/en-us/library/hh229460(v=vs.103).aspx">Observable.Range Method (Int32, Int32)</a>
+ */
+ public static Observable<Integer> range(int start, int count) {
+ return from(Range.createWithCount(start, count));
+ }
+
/**
* Returns an Observable that notifies an {@link Observer} of a single value and then completes.
* <p>
diff --git a/rxjava-core/src/main/java/rx/util/Range.java b/rxjava-core/src/main/java/rx/util/Range.java
new file mode 100644
index 0000000000..bfc1bad44d
--- /dev/null
+++ b/rxjava-core/src/main/java/rx/util/Range.java
@@ -0,0 +1,107 @@
+/**
+ * Copyright 2013 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package rx.util;
+
+import org.junit.Test;
+
+import java.util.*;
+
+import static org.junit.Assert.assertEquals;
+
+public final class Range implements Iterable<Integer> {
+ private final int start;
+ private final int end;
+ private final int step;
+
+ public static Range createWithCount(int start, int count) {
+ return create(start, start * (count + 1));
+ }
+
+ public static Range create(int start, int end) {
+ return new Range(start, end, 1);
+ }
+
+ public static Range createWithStep(int start, int end, int step) {
+ return new Range(start, end, step);
+ }
+
+ private Range(int start, int end, int step) {
+ this.start = start;
+ this.end = end;
+ this.step = step;
+ }
+
+ @Override
+ public Iterator<Integer> iterator() {
+ return new Iterator<Integer>() {
+ private int current = start;
+
+ @Override
+ public boolean hasNext() {
+ return current < end;
+ }
+
+ @Override
+ public Integer next() {
+ if (!hasNext()) {
+ throw new NoSuchElementException("No more elements");
+ }
+ int result = current;
+ current += step;
+ return result;
+ }
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException("Read only iterator");
+ }
+ };
+ }
+
+ @Override
+ public String toString() {
+ return "Range (" + start + ", " + end + "), step " + step;
+ }
+
+
+ public static class UnitTest {
+
+ @Test
+ public void testSimpleRange() {
+ assertEquals(Arrays.asList(1, 2, 3, 4), toList(Range.create(1, 5)));
+ }
+
+ @Test
+ public void testRangeWithStep() {
+ assertEquals(Arrays.asList(1, 3, 5, 7, 9), toList(Range.createWithStep(1, 10, 2)));
+ }
+
+ @Test
+ public void testRangeWithCount() {
+ assertEquals(Arrays.asList(1, 2, 3, 4, 5), toList(Range.createWithCount(1, 5)));
+ }
+
+
+ private static <T> List<T> toList(Iterable<T> iterable) {
+ List<T> result = new ArrayList<T>();
+ for (T element : iterable) {
+ result.add(element);
+ }
+ return result;
+ }
+
+ }
+}
\ No newline at end of file
|
cb0d7d4735665fa8ca1b59555a06354859c0045a
|
elasticsearch
|
inital support for zen discovery module- (multicast discovery implemented)--
|
a
|
https://github.com/elastic/elasticsearch
|
diff --git a/.idea/dictionaries/kimchy.xml b/.idea/dictionaries/kimchy.xml
index d77ade9283014..4e8f214db02c1 100644
--- a/.idea/dictionaries/kimchy.xml
+++ b/.idea/dictionaries/kimchy.xml
@@ -1,20 +1,24 @@
<component name="ProjectDictionaryState">
<dictionary name="kimchy">
<words>
+ <w>addr</w>
<w>args</w>
<w>asciifolding</w>
<w>attr</w>
<w>banon</w>
+ <w>bindhost</w>
<w>birthdate</w>
<w>bool</w>
<w>booleans</w>
<w>camelcase</w>
+ <w>canonicalhost</w>
<w>checksum</w>
<w>closeable</w>
<w>commitable</w>
<w>committable</w>
<w>configurator</w>
<w>coord</w>
+ <w>datagram</w>
<w>desc</w>
<w>deserialize</w>
<w>elasticsearch</w>
@@ -26,6 +30,7 @@
<w>indices</w>
<w>inet</w>
<w>infos</w>
+ <w>intf</w>
<w>iter</w>
<w>jgroups</w>
<w>joda</w>
@@ -33,12 +38,14 @@
<w>kimchy</w>
<w>lifecycle</w>
<w>linefeeds</w>
+ <w>loopback</w>
<w>lucene</w>
<w>memcached</w>
<w>metadata</w>
<w>millis</w>
<w>mmap</w>
<w>multi</w>
+ <w>multicast</w>
<w>multiline</w>
<w>nanos</w>
<w>newcount</w>
@@ -49,6 +56,7 @@
<w>pluggable</w>
<w>plugins</w>
<w>porterstem</w>
+ <w>publishhost</w>
<w>rebalance</w>
<w>sbuf</w>
<w>searchable</w>
diff --git a/.idea/projectCodeStyle.xml b/.idea/projectCodeStyle.xml
index f98da4cdac39c..4164e9dc0e18f 100644
--- a/.idea/projectCodeStyle.xml
+++ b/.idea/projectCodeStyle.xml
@@ -61,6 +61,15 @@
<option name="LABEL_INDENT_SIZE" value="0" />
<option name="LABEL_INDENT_ABSOLUTE" value="false" />
</ADDITIONAL_INDENT_OPTIONS>
+ <ADDITIONAL_INDENT_OPTIONS fileType="php">
+ <option name="INDENT_SIZE" value="4" />
+ <option name="CONTINUATION_INDENT_SIZE" value="8" />
+ <option name="TAB_SIZE" value="4" />
+ <option name="USE_TAB_CHARACTER" value="false" />
+ <option name="SMART_TABS" value="false" />
+ <option name="LABEL_INDENT_SIZE" value="0" />
+ <option name="LABEL_INDENT_ABSOLUTE" value="false" />
+ </ADDITIONAL_INDENT_OPTIONS>
<ADDITIONAL_INDENT_OPTIONS fileType="scala">
<option name="INDENT_SIZE" value="2" />
<option name="CONTINUATION_INDENT_SIZE" value="8" />
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterService.java
index 6d9242b352e7b..9a064c49674bc 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterService.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterService.java
@@ -23,7 +23,7 @@
import org.elasticsearch.util.component.LifecycleComponent;
/**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
*/
public interface ClusterService extends LifecycleComponent<ClusterService> {
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/metadata/MetaDataService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/metadata/MetaDataService.java
index 3ba9549e356b5..6afb6ba527e6f 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/metadata/MetaDataService.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/metadata/MetaDataService.java
@@ -319,7 +319,11 @@ public synchronized void updateMapping(final String index, final String type, fi
}
// build the updated mapping source
final String updatedMappingSource = existingMapper.buildSource();
- logger.info("Index [" + index + "]: Update mapping [" + type + "] (dynamic) with source [" + updatedMappingSource + "]");
+ if (logger.isDebugEnabled()) {
+ logger.debug("Index [" + index + "]: Update mapping [" + type + "] (dynamic) with source [" + updatedMappingSource + "]");
+ } else if (logger.isInfoEnabled()) {
+ logger.info("Index [" + index + "]: Update mapping [" + type + "] (dynamic)");
+ }
// publish the new mapping
clusterService.submitStateUpdateTask("update-mapping [" + index + "][" + type + "]", new ClusterStateUpdateTask() {
@Override public ClusterState execute(ClusterState currentState) {
@@ -391,7 +395,11 @@ public synchronized PutMappingResult putMapping(final String[] indices, String m
mapping = new Tuple<String, String>(newMapper.type(), newMapper.buildSource());
}
mappings.put(index, mapping);
- logger.info("Index [" + index + "]: Put mapping [" + mapping.v1() + "] with source [" + mapping.v2() + "]");
+ if (logger.isDebugEnabled()) {
+ logger.debug("Index [" + index + "]: Put mapping [" + mapping.v1() + "] with source [" + mapping.v2() + "]");
+ } else if (logger.isInfoEnabled()) {
+ logger.info("Index [" + index + "]: Put mapping [" + mapping.v1() + "]");
+ }
}
final CountDownLatch latch = new CountDownLatch(clusterService.state().nodes().size() * indices.length);
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java
index a449340ade6e5..c8b18942a5846 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java
@@ -31,7 +31,7 @@
import org.elasticsearch.env.Environment;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.util.component.AbstractLifecycleComponent;
-import org.elasticsearch.util.io.HostResolver;
+import org.elasticsearch.util.io.NetworkUtils;
import org.elasticsearch.util.io.stream.BytesStreamInput;
import org.elasticsearch.util.io.stream.BytesStreamOutput;
import org.elasticsearch.util.settings.Settings;
@@ -109,8 +109,8 @@ public class JgroupsDiscovery extends AbstractLifecycleComponent<Discovery> impl
if (System.getProperty("jgroups.bind_addr") == null) {
// automatically set the bind address based on ElasticSearch default bindings...
try {
- InetAddress bindAddress = HostResolver.resolveBindHostAddress(null, settings, HostResolver.LOCAL_IP);
- if ((bindAddress instanceof Inet4Address && HostResolver.isIPv4()) || (bindAddress instanceof Inet6Address && !HostResolver.isIPv4())) {
+ InetAddress bindAddress = NetworkUtils.resolveBindHostAddress(null, settings, NetworkUtils.LOCAL);
+ if ((bindAddress instanceof Inet4Address && NetworkUtils.isIPv4()) || (bindAddress instanceof Inet6Address && !NetworkUtils.isIPv4())) {
sysPropsSet.put("jgroups.bind_addr", bindAddress.getHostAddress());
System.setProperty("jgroups.bind_addr", bindAddress.getHostAddress());
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscoveryModule.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscoveryModule.java
index 93a9e88ad2004..14cab3dc2d9cd 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscoveryModule.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscoveryModule.java
@@ -23,7 +23,7 @@
import org.elasticsearch.discovery.Discovery;
/**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
*/
public class LocalDiscoveryModule extends AbstractModule {
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/DiscoveryNodesProvider.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/DiscoveryNodesProvider.java
new file mode 100644
index 0000000000000..eb3f2b0a2c5bc
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/DiscoveryNodesProvider.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen;
+
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+
/**
 * Supplies the current view of cluster membership to discovery
 * sub-components (fault detection, pinging, publishing) without coupling
 * them to a concrete {@code Discovery} implementation.
 *
 * @author kimchy (shay.banon)
 */
public interface DiscoveryNodesProvider {

    /** Returns the latest known set of discovery nodes. */
    DiscoveryNodes nodes();
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
new file mode 100644
index 0000000000000..7045c0d2d5832
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java
@@ -0,0 +1,439 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen;
+
+import com.google.inject.Inject;
+import org.elasticsearch.ElasticSearchException;
+import org.elasticsearch.ElasticSearchIllegalStateException;
+import org.elasticsearch.cluster.*;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.discovery.Discovery;
+import org.elasticsearch.discovery.InitialStateDiscoveryListener;
+import org.elasticsearch.discovery.zen.elect.ElectMasterService;
+import org.elasticsearch.discovery.zen.fd.MasterFaultDetection;
+import org.elasticsearch.discovery.zen.fd.NodesFaultDetection;
+import org.elasticsearch.discovery.zen.membership.MembershipAction;
+import org.elasticsearch.discovery.zen.ping.ZenPing;
+import org.elasticsearch.discovery.zen.ping.ZenPingService;
+import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.util.TimeValue;
+import org.elasticsearch.util.UUID;
+import org.elasticsearch.util.component.AbstractLifecycleComponent;
+import org.elasticsearch.util.settings.Settings;
+
+import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static com.google.common.collect.Lists.*;
+import static org.elasticsearch.cluster.ClusterState.*;
+import static org.elasticsearch.cluster.node.DiscoveryNodes.*;
+import static org.elasticsearch.util.TimeValue.*;
+
/**
 * Zen discovery: nodes find each other through {@link ZenPingService} pings,
 * deterministically elect a master via {@link ElectMasterService}, and then
 * monitor one another with master/nodes fault detection. Only the elected
 * master is allowed to publish cluster state changes (see {@link #publish}).
 *
 * @author kimchy (shay.banon)
 */
public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implements Discovery, DiscoveryNodesProvider {

    private final TransportService transportService;

    private final ClusterService clusterService;

    private final ClusterName clusterName;

    private final ZenPingService pingService;

    // Monitors the elected master; its listener triggers re-election on failure.
    private final MasterFaultDetection masterFD;

    // Monitors the other nodes; started only while this node is the master.
    private final NodesFaultDetection nodesFD;

    private final PublishClusterStateAction publishClusterState;

    private final MembershipAction membership;


    // How long pingAndWait blocks during initial discovery / join attempts.
    private final TimeValue initialPingTimeout;

    private final ElectMasterService electMaster;


    // Set in doStart(); identifies this node to the rest of the cluster.
    private DiscoveryNode localNode;

    private final CopyOnWriteArrayList<InitialStateDiscoveryListener> initialStateListeners = new CopyOnWriteArrayList<InitialStateDiscoveryListener>();

    // True while this node believes it is the elected master.
    private volatile boolean master = false;

    private volatile boolean firstMaster = false;

    // Last cluster membership view seen (updated on publish/receive/failure).
    private volatile DiscoveryNodes latestDiscoNodes;

    // Ensures initial-state listeners are notified exactly once per start.
    private final AtomicBoolean initialStateSent = new AtomicBoolean();

    @Inject public ZenDiscovery(Settings settings, ClusterName clusterName, ThreadPool threadPool,
                                TransportService transportService, ClusterService clusterService,
                                ZenPingService pingService) {
        super(settings);
        this.clusterName = clusterName;
        this.clusterService = clusterService;
        this.transportService = transportService;
        this.pingService = pingService;

        this.initialPingTimeout = componentSettings.getAsTime("initial_ping_timeout", timeValueSeconds(3));

        this.electMaster = new ElectMasterService(settings);

        this.masterFD = new MasterFaultDetection(settings, threadPool, transportService, this);
        this.masterFD.addListener(new MasterNodeFailureListener());

        this.nodesFD = new NodesFaultDetection(settings, threadPool, transportService);
        this.nodesFD.addListener(new NodeFailureListener());

        this.publishClusterState = new PublishClusterStateAction(settings, transportService, this, new NewClusterStateListener());
        this.pingService.setNodesProvider(this);
        this.membership = new MembershipAction(settings, transportService, new MembershipListener());
    }

    /**
     * Builds the local node, pings until a master is resolved, and either
     * assumes mastership (publishing ourselves as the sole node) or joins the
     * discovered master, retrying on connect/join failures.
     */
    @Override protected void doStart() throws ElasticSearchException {
        localNode = new DiscoveryNode(settings.get("name"), settings.getAsBoolean("node.data", !settings.getAsBoolean("node.client", false)), UUID.randomUUID().toString(), transportService.boundAddress().publishAddress());
        pingService.start();

        boolean retry = true;
        while (retry) {
            retry = false;
            DiscoveryNode masterNode = pingTillMasterResolved();
            if (localNode.equals(masterNode)) {
                // we are the master (first)
                this.firstMaster = true;
                this.master = true;
                nodesFD.start(); // start the nodes FD
                clusterService.submitStateUpdateTask("zen-disco-initial_connect(master)", new ProcessedClusterStateUpdateTask() {
                    @Override public ClusterState execute(ClusterState currentState) {
                        DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder()
                                .localNodeId(localNode.id())
                                .masterNodeId(localNode.id())
                                // put our local node
                                .put(localNode);
                        // update the fact that we are the master...
                        latestDiscoNodes = builder.build();
                        return newClusterStateBuilder().state(currentState).nodes(builder).build();
                    }

                    @Override public void clusterStateProcessed(ClusterState clusterState) {
                        sendInitialStateEventIfNeeded();
                    }
                });
            } else {
                this.firstMaster = false;
                this.master = false;
                try {
                    // first, make sure we can connect to the master
                    transportService.connectToNode(masterNode);
                } catch (Exception e) {
                    logger.warn("Failed to connect to master [{}], retrying...", e, masterNode);
                    retry = true;
                    continue;
                }
                // send join request
                try {
                    membership.sendJoinRequestBlocking(masterNode, localNode, initialPingTimeout);
                } catch (Exception e) {
                    logger.warn("Failed to send join request to master [{}], retrying...", e, masterNode);
                    // failed to send the join request, retry
                    retry = true;
                    continue;
                }
                // cool, we found a master, start an FD on it
                masterFD.start(masterNode);
            }
        }
    }

    /**
     * Stops pinging and fault detection. A non-master sends a (best-effort)
     * leave request to the master; a master notifies the next possible
     * masters that it is leaving so one of them can take over.
     */
    @Override protected void doStop() throws ElasticSearchException {
        pingService.stop();
        if (masterFD.masterNode() != null) {
            masterFD.stop();
        }
        nodesFD.stop();
        initialStateSent.set(false);
        if (!master) {
            try {
                membership.sendLeaveRequestBlocking(latestDiscoNodes.masterNode(), localNode, TimeValue.timeValueSeconds(1));
            } catch (Exception e) {
                logger.debug("Failed to send leave request to master [{}]", e, latestDiscoNodes.masterNode());
            }
        } else {
            DiscoveryNode[] possibleMasters = electMaster.nextPossibleMasters(latestDiscoNodes.nodes().values(), 3);
            for (DiscoveryNode possibleMaster : possibleMasters) {
                if (localNode.equals(possibleMaster)) {
                    continue;
                }
                try {
                    membership.sendLeaveRequest(latestDiscoNodes.masterNode(), possibleMaster);
                } catch (Exception e) {
                    logger.debug("Failed to send leave request from master [{}] to possible master [{}]", e, latestDiscoNodes.masterNode(), possibleMaster);
                }
            }
        }
        master = false;
    }

    @Override protected void doClose() throws ElasticSearchException {
        masterFD.close();
        nodesFD.close();
        publishClusterState.close();
        membership.close();
        pingService.close();
    }

    @Override public void addListener(InitialStateDiscoveryListener listener) {
        this.initialStateListeners.add(listener);
    }

    @Override public void removeListener(InitialStateDiscoveryListener listener) {
        this.initialStateListeners.remove(listener);
    }

    @Override public String nodeDescription() {
        return clusterName.value() + "/" + localNode.id();
    }

    @Override public boolean firstMaster() {
        return firstMaster;
    }

    /**
     * Returns the latest membership view, or — before any election has
     * completed — a view containing only the local node.
     */
    @Override public DiscoveryNodes nodes() {
        DiscoveryNodes latestNodes = this.latestDiscoNodes;
        if (latestNodes != null) {
            return latestNodes;
        }
        // have not decided yet, just send the local node
        return newNodesBuilder().put(localNode).localNodeId(localNode.id()).build();
    }

    /**
     * Publishes a cluster state to the other nodes. Only legal on the master;
     * also refreshes the nodes fault detection with the new membership.
     */
    @Override public void publish(ClusterState clusterState) {
        if (!master) {
            throw new ElasticSearchIllegalStateException("Shouldn't publish state when not master");
        }
        latestDiscoNodes = clusterState.nodes();
        nodesFD.updateNodes(clusterState.nodes());
        publishClusterState.publish(clusterState);
    }

    /**
     * Master-side reaction to a failed node: remove it from the cluster state.
     * No-op on non-masters.
     */
    private void handleNodeFailure(final DiscoveryNode node) {
        if (!master) {
            // nothing to do here...
            return;
        }
        clusterService.submitStateUpdateTask("zen-disco-node_failed(" + node + ")", new ProcessedClusterStateUpdateTask() {
            @Override public ClusterState execute(ClusterState currentState) {
                DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder()
                        .putAll(currentState.nodes())
                        .remove(node.id());
                latestDiscoNodes = builder.build();
                return newClusterStateBuilder().state(currentState).nodes(latestDiscoNodes).build();
            }

            @Override public void clusterStateProcessed(ClusterState clusterState) {
                sendInitialStateEventIfNeeded();
            }
        });
    }

    /**
     * Reaction to losing the current master: re-elect among the remaining
     * nodes. If we win, we become master and start monitoring the other
     * nodes; otherwise we start (or stop) master fault detection against the
     * newly elected node.
     */
    private void handleMasterGone(final DiscoveryNode masterNode, String reason) {
        if (master) {
            // we might get this on both a master telling us shutting down, and then the disconnect failure
            return;
        }

        logger.info("Master [{}] left, reason [{}]", masterNode, reason);
        List<DiscoveryNode> nodes = newArrayList(latestDiscoNodes.nodes().values());
        nodes.remove(masterNode); // remove the master node from the list, it has failed
        // sort then
        DiscoveryNode electedMaster = electMaster.electMaster(nodes);
        if (localNode.equals(electedMaster)) {
            this.master = true;
            masterFD.stop();
            nodesFD.start();
            clusterService.submitStateUpdateTask("zen-disco-elected_as_master(old master [" + masterNode + "])", new ProcessedClusterStateUpdateTask() {
                @Override public ClusterState execute(ClusterState currentState) {
                    DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder()
                            .putAll(currentState.nodes())
                            // make sure the old master node, which has failed, is not part of the nodes we publish
                            .remove(masterNode.id())
                            .masterNodeId(localNode.id());
                    // update the fact that we are the master...
                    latestDiscoNodes = builder.build();
                    return newClusterStateBuilder().state(currentState).nodes(latestDiscoNodes).build();
                }

                @Override public void clusterStateProcessed(ClusterState clusterState) {
                    sendInitialStateEventIfNeeded();
                }
            });
        } else {
            nodesFD.stop();
            if (electedMaster != null) {
                // we are not the master, start FD against the possible master
                masterFD.restart(electedMaster);
            } else {
                masterFD.stop();
            }
        }
    }

    /**
     * Applies a cluster state received from the master (non-master side) and
     * re-points master fault detection if needed.
     */
    void handleNewClusterState(final ClusterState clusterState) {
        if (master) {
            logger.warn("Master should not receive new cluster state from [{}]", clusterState.nodes().masterNode());
        } else {
            latestDiscoNodes = clusterState.nodes();

            // check to see that we monitor the correct master of the cluster
            if (masterFD.masterNode() != null && masterFD.masterNode().equals(latestDiscoNodes.masterNode())) {
                masterFD.restart(latestDiscoNodes.masterNode());
            }

            if (clusterState.nodes().localNode() == null) {
                logger.warn("Received a cluster state from [{}] and not part of the cluster, should not happen", clusterState.nodes().masterNode());
            } else {
                clusterService.submitStateUpdateTask("zen-disco-receive(from [" + clusterState.nodes().masterNode() + "])", new ProcessedClusterStateUpdateTask() {
                    @Override public ClusterState execute(ClusterState currentState) {
                        return clusterState;
                    }

                    @Override public void clusterStateProcessed(ClusterState clusterState) {
                        sendInitialStateEventIfNeeded();
                    }
                });
            }
        }
    }

    /**
     * A node announced it is leaving: the master removes it from the state;
     * a non-master treats a leaving master as a master failure.
     */
    private void handleLeaveRequest(final DiscoveryNode node) {
        if (master) {
            clusterService.submitStateUpdateTask("zen-disco-node_failed(" + node + ")", new ClusterStateUpdateTask() {
                @Override public ClusterState execute(ClusterState currentState) {
                    DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder()
                            .putAll(currentState.nodes())
                            .remove(node.id());
                    latestDiscoNodes = builder.build();
                    return newClusterStateBuilder().state(currentState).nodes(latestDiscoNodes).build();
                }
            });
        } else {
            handleMasterGone(node, "shut_down");
        }
    }

    /**
     * Master-side join handling: add the node to the cluster state (idempotent
     * if it already exists). Throws if invoked on a non-master.
     */
    private void handleJoinRequest(final DiscoveryNode node) {
        if (!master) {
            throw new ElasticSearchIllegalStateException("Node [" + localNode + "] not master for join request from [" + node + "]");
        }
        if (!transportService.addressSupported(node.address().getClass())) {
            // TODO, what should we do now? Maybe inform that node that its crap?
            logger.warn("Received a wrong address type from [{}], ignoring...", node);
        } else {
            clusterService.submitStateUpdateTask("zen-disco-receive(from node[" + node + "])", new ClusterStateUpdateTask() {
                @Override public ClusterState execute(ClusterState currentState) {
                    if (currentState.nodes().nodeExists(node.id())) {
                        // no change, the node already exists in the cluster
                        logger.warn("Received an existing node [{}]", node);
                        return currentState;
                    }
                    return newClusterStateBuilder().state(currentState).nodes(currentState.nodes().newNode(node)).build();
                }
            });
        }
    }

    /**
     * Pings repeatedly until a master is resolved: if any response already
     * names a master, elect among those; otherwise tie-break among all
     * discovered nodes (including ourselves). Loops until someone wins.
     */
    private DiscoveryNode pingTillMasterResolved() {
        while (true) {
            ZenPing.PingResponse[] pingResponses = pingService.pingAndWait(initialPingTimeout);
            List<DiscoveryNode> pingMasters = newArrayList();
            for (ZenPing.PingResponse pingResponse : pingResponses) {
                if (pingResponse.master() != null) {
                    pingMasters.add(pingResponse.master());
                }
            }
            if (pingMasters.isEmpty()) {
                // lets tie break between discovered nodes
                List<DiscoveryNode> possibleMasterNodes = newArrayList();
                possibleMasterNodes.add(localNode);
                for (ZenPing.PingResponse pingResponse : pingResponses) {
                    possibleMasterNodes.add(pingResponse.target());
                }
                DiscoveryNode electedMaster = electMaster.electMaster(possibleMasterNodes);
                if (localNode.equals(electedMaster)) {
                    return localNode;
                }
            } else {
                DiscoveryNode electedMaster = electMaster.electMaster(pingMasters);
                if (electedMaster != null) {
                    return electedMaster;
                }
            }
        }
    }

    // Fires the initial-state listeners exactly once (compareAndSet guard).
    private void sendInitialStateEventIfNeeded() {
        if (initialStateSent.compareAndSet(false, true)) {
            for (InitialStateDiscoveryListener listener : initialStateListeners) {
                listener.initialStateProcessed();
            }
        }
    }

    // Bridges PublishClusterStateAction callbacks into handleNewClusterState.
    private class NewClusterStateListener implements PublishClusterStateAction.NewClusterStateListener {
        @Override public void onNewClusterState(ClusterState clusterState) {
            handleNewClusterState(clusterState);
        }
    }

    // Bridges membership join/leave callbacks into the handlers above.
    private class MembershipListener implements MembershipAction.MembershipListener {
        @Override public void onJoin(DiscoveryNode node) {
            handleJoinRequest(node);
        }

        @Override public void onLeave(DiscoveryNode node) {
            handleLeaveRequest(node);
        }
    }

    // Bridges nodes fault-detection failures into handleNodeFailure.
    private class NodeFailureListener implements NodesFaultDetection.Listener {

        @Override public void onNodeFailure(DiscoveryNode node) {
            handleNodeFailure(node);
        }
    }

    // Bridges master fault-detection events: failure triggers re-election,
    // a transport disconnect triggers a re-join attempt.
    private class MasterNodeFailureListener implements MasterFaultDetection.Listener {

        @Override public void onMasterFailure(DiscoveryNode masterNode) {
            handleMasterGone(masterNode, "failure");
        }

        @Override public void onDisconnectedFromMaster() {
            // got disconnected from the master, send a join request
            membership.sendJoinRequest(latestDiscoNodes.masterNode(), localNode);
        }
    }
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ZenDiscoveryModule.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ZenDiscoveryModule.java
new file mode 100644
index 0000000000000..80289d55dc925
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ZenDiscoveryModule.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen;
+
+import com.google.inject.AbstractModule;
+import org.elasticsearch.discovery.Discovery;
+import org.elasticsearch.discovery.zen.ping.ZenPingService;
+
/**
 * Guice module wiring zen discovery: installs the ping service and binds
 * {@link Discovery} to {@link ZenDiscovery}, both as eager singletons.
 *
 * @author kimchy (shay.banon)
 */
public class ZenDiscoveryModule extends AbstractModule {

    @Override protected void configure() {
        bind(ZenPingService.class).asEagerSingleton();
        bind(Discovery.class).to(ZenDiscovery.class).asEagerSingleton();
    }
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java
new file mode 100644
index 0000000000000..aa18dce53e2ff
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/elect/ElectMasterService.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen.elect;
+
+import com.google.common.collect.Lists;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.util.component.AbstractComponent;
+import org.elasticsearch.util.settings.Settings;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+import static com.google.common.collect.Lists.*;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class ElectMasterService extends AbstractComponent {
+
+ private final NodeComparator nodeComparator = new NodeComparator();
+
+ public ElectMasterService(Settings settings) {
+ super(settings);
+ }
+
+ /**
+ * Returns a list of the next possible masters.
+ */
+ public DiscoveryNode[] nextPossibleMasters(Iterable<DiscoveryNode> nodes, int numberOfPossibleMasters) {
+ List<DiscoveryNode> sortedNodes = sortedNodes(nodes);
+ if (sortedNodes == null) {
+ return new DiscoveryNode[0];
+ }
+ List<DiscoveryNode> nextPossibleMasters = newArrayListWithExpectedSize(numberOfPossibleMasters);
+ int counter = 0;
+ for (DiscoveryNode nextPossibleMaster : sortedNodes) {
+ if (++counter >= numberOfPossibleMasters) {
+ break;
+ }
+ nextPossibleMasters.add(nextPossibleMaster);
+ }
+ return nextPossibleMasters.toArray(new DiscoveryNode[nextPossibleMasters.size()]);
+ }
+
+ /**
+ * Elects a new master out of the possible nodes, returning it. Returns <tt>null</tt>
+ * if no master has been elected.
+ */
+ public DiscoveryNode electMaster(Iterable<DiscoveryNode> nodes) {
+ List<DiscoveryNode> sortedNodes = sortedNodes(nodes);
+ if (sortedNodes == null) {
+ return null;
+ }
+ return sortedNodes.get(0);
+ }
+
+ private List<DiscoveryNode> sortedNodes(Iterable<DiscoveryNode> nodes) {
+ List<DiscoveryNode> possibleNodes = Lists.newArrayList(nodes);
+ if (possibleNodes.isEmpty()) {
+ return null;
+ }
+ Collections.sort(possibleNodes, nodeComparator);
+ return possibleNodes;
+ }
+
+ private static class NodeComparator implements Comparator<DiscoveryNode> {
+
+ @Override public int compare(DiscoveryNode o1, DiscoveryNode o2) {
+ return o1.id().compareTo(o2.id());
+ }
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java
new file mode 100644
index 0000000000000..673a73f160383
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/fd/MasterFaultDetection.java
@@ -0,0 +1,276 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen.fd;
+
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.discovery.zen.DiscoveryNodesProvider;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.*;
+import org.elasticsearch.util.TimeValue;
+import org.elasticsearch.util.component.AbstractComponent;
+import org.elasticsearch.util.io.stream.StreamInput;
+import org.elasticsearch.util.io.stream.StreamOutput;
+import org.elasticsearch.util.io.stream.Streamable;
+import org.elasticsearch.util.settings.Settings;
+
+import java.io.IOException;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.elasticsearch.cluster.node.DiscoveryNode.*;
+import static org.elasticsearch.util.TimeValue.*;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class MasterFaultDetection extends AbstractComponent {
+
+ public static interface Listener {
+
+ void onMasterFailure(DiscoveryNode masterNode);
+
+ void onDisconnectedFromMaster();
+ }
+
+ private final ThreadPool threadPool;
+
+ private final TransportService transportService;
+
+ private final DiscoveryNodesProvider nodesProvider;
+
+ private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<Listener>();
+
+
+ private final boolean connectOnNetworkDisconnect;
+
+ private final TimeValue pingInterval;
+
+ private final TimeValue pingRetryTimeout;
+
+ private final int pingRetryCount;
+
+ private final FDConnectionListener connectionListener;
+
+ private volatile DiscoveryNode masterNode;
+
+ private volatile int retryCount;
+
+ private final AtomicBoolean notifiedMasterFailure = new AtomicBoolean();
+
+ public MasterFaultDetection(Settings settings, ThreadPool threadPool, TransportService transportService, DiscoveryNodesProvider nodesProvider) {
+ super(settings);
+ this.threadPool = threadPool;
+ this.transportService = transportService;
+ this.nodesProvider = nodesProvider;
+
+ this.connectOnNetworkDisconnect = componentSettings.getAsBoolean("connect_on_network_disconnect", false);
+ this.pingInterval = componentSettings.getAsTime("ping_interval", timeValueSeconds(1));
+ this.pingRetryTimeout = componentSettings.getAsTime("ping_timeout", timeValueSeconds(6));
+ this.pingRetryCount = componentSettings.getAsInt("ping_retries", 5);
+
+ this.connectionListener = new FDConnectionListener();
+ transportService.addConnectionListener(connectionListener);
+
+ transportService.registerHandler(MasterPingRequestHandler.ACTION, new MasterPingRequestHandler());
+ }
+
+ public DiscoveryNode masterNode() {
+ return this.masterNode;
+ }
+
+ public void addListener(Listener listener) {
+ listeners.add(listener);
+ }
+
+ public void removeListener(Listener listener) {
+ listeners.remove(listener);
+ }
+
+ public void restart(DiscoveryNode masterNode) {
+ stop();
+ start(masterNode);
+ }
+
+ public void start(DiscoveryNode masterNode) {
+ this.masterNode = masterNode;
+ this.retryCount = 0;
+ this.notifiedMasterFailure.set(false);
+
+ // try and connect to make sure we are connected
+ try {
+ transportService.connectToNode(masterNode);
+ } catch (Exception e) {
+ notifyMasterFailure(masterNode);
+ }
+
+ // start the ping process
+ threadPool.schedule(new SendPingRequest(), pingInterval);
+ }
+
+ public void stop() {
+ // also will stop the next ping schedule
+ this.retryCount = 0;
+ this.masterNode = null;
+ }
+
+ public void close() {
+ stop();
+ this.listeners.clear();
+ transportService.removeConnectionListener(connectionListener);
+ transportService.removeHandler(MasterPingRequestHandler.ACTION);
+ }
+
+ private void handleTransportDisconnect(DiscoveryNode node) {
+ if (!node.equals(this.masterNode)) {
+ return;
+ }
+ if (connectOnNetworkDisconnect) {
+ try {
+ transportService.connectToNode(node);
+ } catch (Exception e) {
+ logger.trace("Master [{}] failed on disconnect (with verified connect)", masterNode);
+ notifyMasterFailure(masterNode);
+ }
+ } else {
+ logger.trace("Master [{}] failed on disconnect", masterNode);
+ notifyMasterFailure(masterNode);
+ }
+ }
+
+ private void notifyDisconnectedFromMaster() {
+ for (Listener listener : listeners) {
+ listener.onDisconnectedFromMaster();
+ }
+ // we don't stop on disconnection from master, we keep pinging it
+ }
+
+ private void notifyMasterFailure(DiscoveryNode masterNode) {
+ if (notifiedMasterFailure.compareAndSet(false, true)) {
+ for (Listener listener : listeners) {
+ listener.onMasterFailure(masterNode);
+ }
+ stop();
+ }
+ }
+
+ private class FDConnectionListener implements TransportConnectionListener {
+ @Override public void onNodeConnected(DiscoveryNode node) {
+ }
+
+ @Override public void onNodeDisconnected(DiscoveryNode node) {
+ handleTransportDisconnect(node);
+ }
+ }
+
+ private class SendPingRequest implements Runnable {
+ @Override public void run() {
+ if (masterNode != null) {
+ final DiscoveryNode sentToNode = masterNode;
+ transportService.sendRequest(masterNode, MasterPingRequestHandler.ACTION, new MasterPingRequest(nodesProvider.nodes().localNode()), pingRetryTimeout,
+ new BaseTransportResponseHandler<MasterPingResponseResponse>() {
+ @Override public MasterPingResponseResponse newInstance() {
+ return new MasterPingResponseResponse();
+ }
+
+ @Override public void handleResponse(MasterPingResponseResponse response) {
+ // check if the master node did not get switched on us...
+ if (sentToNode.equals(MasterFaultDetection.this.masterNode())) {
+ if (!response.connectedToMaster) {
+ logger.trace("Master [{}] does not have us registered with it...", masterNode);
+ notifyDisconnectedFromMaster();
+ } else {
+ threadPool.schedule(SendPingRequest.this, pingInterval);
+ }
+ }
+ }
+
+ @Override public void handleException(RemoteTransportException exp) {
+ // check if the master node did not get switched on us...
+ if (sentToNode.equals(MasterFaultDetection.this.masterNode())) {
+ int retryCount = ++MasterFaultDetection.this.retryCount;
+ logger.trace("Master [{}] failed to ping, retry [{}] out of [{}]", exp, masterNode, retryCount, pingRetryCount);
+ if (retryCount >= pingRetryCount) {
+ logger.trace("Master [{}] failed on ping", masterNode);
+ // not good, failure
+ notifyMasterFailure(sentToNode);
+ }
+ }
+ }
+ });
+ }
+ }
+ }
+
+ private class MasterPingRequestHandler extends BaseTransportRequestHandler<MasterPingRequest> {
+
+ public static final String ACTION = "discovery/zen/fd/masterPing";
+
+ @Override public MasterPingRequest newInstance() {
+ return new MasterPingRequest();
+ }
+
+ @Override public void messageReceived(MasterPingRequest request, TransportChannel channel) throws Exception {
+ DiscoveryNodes nodes = nodesProvider.nodes();
+ channel.sendResponse(new MasterPingResponseResponse(nodes.nodeExists(request.node.id())));
+ }
+ }
+
+
+ private class MasterPingRequest implements Streamable {
+
+ private DiscoveryNode node;
+
+ private MasterPingRequest() {
+ }
+
+ private MasterPingRequest(DiscoveryNode node) {
+ this.node = node;
+ }
+
+ @Override public void readFrom(StreamInput in) throws IOException {
+ node = readNode(in);
+ }
+
+ @Override public void writeTo(StreamOutput out) throws IOException {
+ node.writeTo(out);
+ }
+ }
+
+ private class MasterPingResponseResponse implements Streamable {
+
+ private boolean connectedToMaster;
+
+ private MasterPingResponseResponse() {
+ }
+
+ private MasterPingResponseResponse(boolean connectedToMaster) {
+ this.connectedToMaster = connectedToMaster;
+ }
+
+ @Override public void readFrom(StreamInput in) throws IOException {
+ connectedToMaster = in.readBoolean();
+ }
+
+ @Override public void writeTo(StreamOutput out) throws IOException {
+ out.writeBoolean(connectedToMaster);
+ }
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java
new file mode 100644
index 0000000000000..42ea842c0b762
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/fd/NodesFaultDetection.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen.fd;
+
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.*;
+import org.elasticsearch.util.TimeValue;
+import org.elasticsearch.util.component.AbstractComponent;
+import org.elasticsearch.util.io.stream.StreamInput;
+import org.elasticsearch.util.io.stream.StreamOutput;
+import org.elasticsearch.util.io.stream.Streamable;
+import org.elasticsearch.util.settings.Settings;
+
+import java.io.IOException;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+
+import static org.elasticsearch.cluster.node.DiscoveryNodes.*;
+import static org.elasticsearch.util.TimeValue.*;
+import static org.elasticsearch.util.concurrent.ConcurrentMaps.*;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class NodesFaultDetection extends AbstractComponent {
+
+ public static interface Listener {
+
+ void onNodeFailure(DiscoveryNode node);
+ }
+
+ private final ThreadPool threadPool;
+
+ private final TransportService transportService;
+
+
+ private final boolean connectOnNetworkDisconnect;
+
+ private final TimeValue pingInterval;
+
+ private final TimeValue pingRetryTimeout;
+
+ private final int pingRetryCount;
+
+
+ private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<Listener>();
+
+ private final ConcurrentMap<DiscoveryNode, NodeFD> nodesFD = newConcurrentMap();
+
+ private final FDConnectionListener connectionListener;
+
+ private volatile DiscoveryNodes latestNodes = EMPTY_NODES;
+
+ private volatile boolean running = false;
+
+ public NodesFaultDetection(Settings settings, ThreadPool threadPool, TransportService transportService) {
+ super(settings);
+ this.threadPool = threadPool;
+ this.transportService = transportService;
+
+ this.connectOnNetworkDisconnect = componentSettings.getAsBoolean("connect_on_network_disconnect", false);
+ this.pingInterval = componentSettings.getAsTime("ping_interval", timeValueSeconds(1));
+ this.pingRetryTimeout = componentSettings.getAsTime("ping_timeout", timeValueSeconds(6));
+ this.pingRetryCount = componentSettings.getAsInt("ping_retries", 5);
+
+ transportService.registerHandler(PingRequestHandler.ACTION, new PingRequestHandler());
+
+ this.connectionListener = new FDConnectionListener();
+ transportService.addConnectionListener(connectionListener);
+ }
+
+ public void addListener(Listener listener) {
+ listeners.add(listener);
+ }
+
+ public void removeListener(Listener listener) {
+ listeners.remove(listener);
+ }
+
+ public void updateNodes(DiscoveryNodes nodes) {
+ DiscoveryNodes prevNodes = latestNodes;
+ this.latestNodes = nodes;
+ if (!running) {
+ return;
+ }
+ DiscoveryNodes.Delta delta = nodes.delta(prevNodes);
+ for (DiscoveryNode newNode : delta.addedNodes()) {
+ if (!nodesFD.containsKey(newNode)) {
+ nodesFD.put(newNode, new NodeFD());
+ threadPool.schedule(new SendPingRequest(newNode), pingInterval);
+ }
+ }
+ for (DiscoveryNode removedNode : delta.removedNodes()) {
+ nodesFD.remove(removedNode);
+ }
+ }
+
+ public NodesFaultDetection start() {
+ if (running) {
+ return this;
+ }
+ running = true;
+ return this;
+ }
+
+ public NodesFaultDetection stop() {
+ if (!running) {
+ return this;
+ }
+ running = false;
+ return this;
+ }
+
+ public void close() {
+ stop();
+ transportService.removeHandler(PingRequestHandler.ACTION);
+ transportService.removeConnectionListener(connectionListener);
+ }
+
+ private void handleTransportDisconnect(DiscoveryNode node) {
+ if (!latestNodes.nodeExists(node.id())) {
+ return;
+ }
+ NodeFD nodeFD = nodesFD.remove(node);
+ if (nodeFD == null) {
+ return;
+ }
+ if (!running) {
+ return;
+ }
+ if (connectOnNetworkDisconnect) {
+ try {
+ transportService.connectToNode(node);
+ } catch (Exception e) {
+ logger.trace("Node [{}] failed on disconnect (with verified connect)", node);
+ notifyNodeFailure(node);
+ }
+ } else {
+ logger.trace("Node [{}] failed on disconnect", node);
+ notifyNodeFailure(node);
+ }
+ }
+
+ private void notifyNodeFailure(DiscoveryNode node) {
+ for (Listener listener : listeners) {
+ listener.onNodeFailure(node);
+ }
+ }
+
+ private class SendPingRequest implements Runnable {
+
+ private final DiscoveryNode node;
+
+ private SendPingRequest(DiscoveryNode node) {
+ this.node = node;
+ }
+
+ @Override public void run() {
+ if (!running) {
+ return;
+ }
+ transportService.sendRequest(node, PingRequestHandler.ACTION, new PingRequest(), pingRetryTimeout,
+ new BaseTransportResponseHandler<PingResponse>() {
+ @Override public PingResponse newInstance() {
+ return new PingResponse();
+ }
+
+ @Override public void handleResponse(PingResponse response) {
+ if (running) {
+ NodeFD nodeFD = nodesFD.get(node);
+ if (nodeFD != null) {
+ nodeFD.retryCount = 0;
+ threadPool.schedule(SendPingRequest.this, pingInterval);
+ }
+ }
+ }
+
+ @Override public void handleException(RemoteTransportException exp) {
+ // check that we are still running and the node is still being monitored (it may have been removed on us)
+ if (running) {
+ NodeFD nodeFD = nodesFD.get(node);
+ if (nodeFD != null) {
+ int retryCount = ++nodeFD.retryCount;
+ logger.trace("Node [{}] failed to ping, retry [{}] out of [{}]", exp, node, retryCount, pingRetryCount);
+ if (retryCount >= pingRetryCount) {
+ logger.trace("Node [{}] failed on ping", node);
+ // not good, failure
+ if (nodesFD.remove(node) != null) {
+ notifyNodeFailure(node);
+ }
+ }
+ }
+ }
+ }
+ });
+ }
+ }
+
+ static class NodeFD {
+ volatile int retryCount;
+ }
+
+ private class FDConnectionListener implements TransportConnectionListener {
+ @Override public void onNodeConnected(DiscoveryNode node) {
+ }
+
+ @Override public void onNodeDisconnected(DiscoveryNode node) {
+ handleTransportDisconnect(node);
+ }
+ }
+
+
+ private class PingRequestHandler extends BaseTransportRequestHandler<PingRequest> {
+
+ public static final String ACTION = "discovery/zen/fd/ping";
+
+ @Override public PingRequest newInstance() {
+ return new PingRequest();
+ }
+
+ @Override public void messageReceived(PingRequest request, TransportChannel channel) throws Exception {
+ channel.sendResponse(new PingResponse());
+ }
+ }
+
+
+ private class PingRequest implements Streamable {
+
+ private PingRequest() {
+ }
+
+ @Override public void readFrom(StreamInput in) throws IOException {
+ }
+
+ @Override public void writeTo(StreamOutput out) throws IOException {
+ }
+ }
+
+ private class PingResponse implements Streamable {
+
+ private PingResponse() {
+ }
+
+ @Override public void readFrom(StreamInput in) throws IOException {
+ }
+
+ @Override public void writeTo(StreamOutput out) throws IOException {
+ }
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java
new file mode 100644
index 0000000000000..f2bfd29e4aed0
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/membership/MembershipAction.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen.membership;
+
+import org.elasticsearch.ElasticSearchException;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.transport.BaseTransportRequestHandler;
+import org.elasticsearch.transport.TransportChannel;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.transport.VoidTransportResponseHandler;
+import org.elasticsearch.util.TimeValue;
+import org.elasticsearch.util.component.AbstractComponent;
+import org.elasticsearch.util.io.stream.StreamInput;
+import org.elasticsearch.util.io.stream.StreamOutput;
+import org.elasticsearch.util.io.stream.Streamable;
+import org.elasticsearch.util.io.stream.VoidStreamable;
+import org.elasticsearch.util.settings.Settings;
+
+import java.io.IOException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class MembershipAction extends AbstractComponent {
+
+ public static interface MembershipListener {
+ void onJoin(DiscoveryNode node);
+
+ void onLeave(DiscoveryNode node);
+ }
+
+ private final TransportService transportService;
+
+ private final MembershipListener listener;
+
+ public MembershipAction(Settings settings, TransportService transportService, MembershipListener listener) {
+ super(settings);
+ this.transportService = transportService;
+ this.listener = listener;
+
+ transportService.registerHandler(JoinRequestRequestHandler.ACTION, new JoinRequestRequestHandler());
+ transportService.registerHandler(LeaveRequestRequestHandler.ACTION, new LeaveRequestRequestHandler());
+ }
+
+ public void close() {
+ transportService.removeHandler(JoinRequestRequestHandler.ACTION);
+ transportService.removeHandler(LeaveRequestRequestHandler.ACTION);
+ }
+
+ public void sendLeaveRequest(DiscoveryNode masterNode, DiscoveryNode node) {
+ transportService.sendRequest(node, LeaveRequestRequestHandler.ACTION, new LeaveRequest(masterNode), VoidTransportResponseHandler.INSTANCE_NOSPAWN);
+ }
+
+ public void sendLeaveRequestBlocking(DiscoveryNode masterNode, DiscoveryNode node, TimeValue timeout) throws ElasticSearchException, TimeoutException {
+ transportService.submitRequest(masterNode, LeaveRequestRequestHandler.ACTION, new LeaveRequest(node), VoidTransportResponseHandler.INSTANCE_NOSPAWN).txGet(timeout.millis(), TimeUnit.MILLISECONDS);
+ }
+
+ public void sendJoinRequest(DiscoveryNode masterNode, DiscoveryNode node) {
+ transportService.sendRequest(masterNode, JoinRequestRequestHandler.ACTION, new JoinRequest(node), VoidTransportResponseHandler.INSTANCE_NOSPAWN);
+ }
+
+ public void sendJoinRequestBlocking(DiscoveryNode masterNode, DiscoveryNode node, TimeValue timeout) throws ElasticSearchException, TimeoutException {
+ transportService.submitRequest(masterNode, JoinRequestRequestHandler.ACTION, new JoinRequest(node), VoidTransportResponseHandler.INSTANCE_NOSPAWN).txGet(timeout.millis(), TimeUnit.MILLISECONDS);
+ }
+
+ private static class JoinRequest implements Streamable {
+
+ private DiscoveryNode node;
+
+ private JoinRequest() {
+ }
+
+ private JoinRequest(DiscoveryNode node) {
+ this.node = node;
+ }
+
+ @Override public void readFrom(StreamInput in) throws IOException {
+ node = DiscoveryNode.readNode(in);
+ }
+
+ @Override public void writeTo(StreamOutput out) throws IOException {
+ node.writeTo(out);
+ }
+ }
+
+ private class JoinRequestRequestHandler extends BaseTransportRequestHandler<JoinRequest> {
+
+ static final String ACTION = "discovery/zen/join";
+
+ @Override public JoinRequest newInstance() {
+ return new JoinRequest();
+ }
+
+ @Override public void messageReceived(JoinRequest request, TransportChannel channel) throws Exception {
+ listener.onJoin(request.node);
+ channel.sendResponse(VoidStreamable.INSTANCE);
+ }
+ }
+
+ private static class LeaveRequest implements Streamable {
+
+ private DiscoveryNode node;
+
+ private LeaveRequest() {
+ }
+
+ private LeaveRequest(DiscoveryNode node) {
+ this.node = node;
+ }
+
+ @Override public void readFrom(StreamInput in) throws IOException {
+ node = DiscoveryNode.readNode(in);
+ }
+
+ @Override public void writeTo(StreamOutput out) throws IOException {
+ node.writeTo(out);
+ }
+ }
+
+ private class LeaveRequestRequestHandler extends BaseTransportRequestHandler<LeaveRequest> {
+
+ static final String ACTION = "discovery/zen/leave";
+
+ @Override public LeaveRequest newInstance() {
+ return new LeaveRequest();
+ }
+
+ @Override public void messageReceived(LeaveRequest request, TransportChannel channel) throws Exception {
+ listener.onLeave(request.node);
+ channel.sendResponse(VoidStreamable.INSTANCE);
+ }
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPing.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPing.java
new file mode 100644
index 0000000000000..06475a76dcfcf
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPing.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen.ping;
+
+import org.elasticsearch.ElasticSearchException;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.discovery.zen.DiscoveryNodesProvider;
+import org.elasticsearch.util.TimeValue;
+import org.elasticsearch.util.component.LifecycleComponent;
+import org.elasticsearch.util.io.stream.StreamInput;
+import org.elasticsearch.util.io.stream.StreamOutput;
+import org.elasticsearch.util.io.stream.Streamable;
+
+import java.io.IOException;
+
+import static org.elasticsearch.cluster.node.DiscoveryNode.*;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public interface ZenPing extends LifecycleComponent<ZenPing> {
+
+ void setNodesProvider(DiscoveryNodesProvider nodesProvider);
+
+ void ping(PingListener listener, TimeValue timeout) throws ElasticSearchException;
+
+ public interface PingListener {
+
+ void onPing(PingResponse[] pings);
+ }
+
+ public class PingResponse implements Streamable {
+
+ private DiscoveryNode target;
+
+ private DiscoveryNode master;
+
+ public PingResponse() {
+ }
+
+ public PingResponse(DiscoveryNode target, DiscoveryNode master) {
+ this.target = target;
+ this.master = master;
+ }
+
+ public DiscoveryNode target() {
+ return target;
+ }
+
+ public DiscoveryNode master() {
+ return master;
+ }
+
+ @Override public void readFrom(StreamInput in) throws IOException {
+ target = readNode(in);
+ if (in.readBoolean()) {
+ master = readNode(in);
+ }
+ }
+
+ @Override public void writeTo(StreamOutput out) throws IOException {
+ target.writeTo(out);
+ if (master == null) {
+ out.writeBoolean(false);
+ } else {
+ out.writeBoolean(true);
+ master.writeTo(out);
+ }
+ }
+
+ @Override public String toString() {
+ return "ping_response target [" + target + "], master [" + master + "]";
+ }
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPingException.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPingException.java
new file mode 100644
index 0000000000000..6d598e1ffcbab
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPingException.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen.ping;
+
+import org.elasticsearch.discovery.DiscoveryException;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class ZenPingException extends DiscoveryException {
+
+ public ZenPingException(String message) {
+ super(message);
+ }
+
+ public ZenPingException(String message, Throwable cause) {
+ super(message, cause);
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPingService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPingService.java
new file mode 100644
index 0000000000000..d9938e5429220
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/ZenPingService.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen.ping;
+
+import com.google.common.collect.ImmutableList;
+import com.google.inject.Inject;
+import org.elasticsearch.ElasticSearchException;
+import org.elasticsearch.ElasticSearchIllegalStateException;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.discovery.zen.DiscoveryNodesProvider;
+import org.elasticsearch.discovery.zen.ping.multicast.MulticastZenPing;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.util.TimeValue;
+import org.elasticsearch.util.component.AbstractLifecycleComponent;
+import org.elasticsearch.util.settings.Settings;
+
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class ZenPingService extends AbstractLifecycleComponent<ZenPing> implements ZenPing {
+
+ private volatile ImmutableList<? extends ZenPing> zenPings = ImmutableList.of();
+
+ @Inject public ZenPingService(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterName clusterName) {
+ super(settings);
+
+ this.zenPings = ImmutableList.of(new MulticastZenPing(settings, threadPool, transportService, clusterName));
+ }
+
+ public ImmutableList<? extends ZenPing> zenPings() {
+ return this.zenPings;
+ }
+
+ public void zenPings(ImmutableList<? extends ZenPing> pings) {
+ this.zenPings = pings;
+ if (lifecycle.started()) {
+ for (ZenPing zenPing : zenPings) {
+ zenPing.start();
+ }
+ } else if (lifecycle.stopped()) {
+ for (ZenPing zenPing : zenPings) {
+ zenPing.stop();
+ }
+ }
+ }
+
+ @Override public void setNodesProvider(DiscoveryNodesProvider nodesProvider) {
+ if (lifecycle.started()) {
+ throw new ElasticSearchIllegalStateException("Can't set nodes provider when started");
+ }
+ for (ZenPing zenPing : zenPings) {
+ zenPing.setNodesProvider(nodesProvider);
+ }
+ }
+
+ @Override protected void doStart() throws ElasticSearchException {
+ for (ZenPing zenPing : zenPings) {
+ zenPing.start();
+ }
+ }
+
+ @Override protected void doStop() throws ElasticSearchException {
+ for (ZenPing zenPing : zenPings) {
+ zenPing.stop();
+ }
+ }
+
+ @Override protected void doClose() throws ElasticSearchException {
+ for (ZenPing zenPing : zenPings) {
+ zenPing.close();
+ }
+ }
+
+ public PingResponse[] pingAndWait(TimeValue timeout) {
+ final AtomicReference<PingResponse[]> response = new AtomicReference<PingResponse[]>();
+ final CountDownLatch latch = new CountDownLatch(1);
+ ping(new PingListener() {
+ @Override public void onPing(PingResponse[] pings) {
+ response.set(pings);
+ latch.countDown();
+ }
+ }, timeout);
+ try {
+ latch.await();
+ return response.get();
+ } catch (InterruptedException e) {
+ return null;
+ }
+ }
+
+ @Override public void ping(PingListener listener, TimeValue timeout) throws ElasticSearchException {
+ ImmutableList<? extends ZenPing> zenPings = this.zenPings;
+ CompoundPingListener compoundPingListener = new CompoundPingListener(listener, zenPings);
+ for (ZenPing zenPing : zenPings) {
+ zenPing.ping(compoundPingListener, timeout);
+ }
+ }
+
+ private static class CompoundPingListener implements PingListener {
+
+ private final PingListener listener;
+
+ private final ImmutableList<? extends ZenPing> zenPings;
+
+ private final AtomicInteger counter;
+
+ private ConcurrentMap<DiscoveryNode, PingResponse> responses = new ConcurrentHashMap<DiscoveryNode, PingResponse>();
+
+ private CompoundPingListener(PingListener listener, ImmutableList<? extends ZenPing> zenPings) {
+ this.listener = listener;
+ this.zenPings = zenPings;
+ this.counter = new AtomicInteger(zenPings.size());
+ }
+
+ @Override public void onPing(PingResponse[] pings) {
+ if (pings != null) {
+ for (PingResponse pingResponse : pings) {
+ responses.put(pingResponse.target(), pingResponse);
+ }
+ }
+ if (counter.decrementAndGet() == 0) {
+ listener.onPing(responses.values().toArray(new PingResponse[responses.size()]));
+ }
+ }
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/multicast/MulticastZenPing.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/multicast/MulticastZenPing.java
new file mode 100644
index 0000000000000..538a204cddea9
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/ping/multicast/MulticastZenPing.java
@@ -0,0 +1,359 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen.ping.multicast;
+
+import org.elasticsearch.ElasticSearchException;
+import org.elasticsearch.ElasticSearchIllegalStateException;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.discovery.DiscoveryException;
+import org.elasticsearch.discovery.zen.DiscoveryNodesProvider;
+import org.elasticsearch.discovery.zen.ping.ZenPing;
+import org.elasticsearch.discovery.zen.ping.ZenPingException;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.transport.*;
+import org.elasticsearch.util.TimeValue;
+import org.elasticsearch.util.component.AbstractLifecycleComponent;
+import org.elasticsearch.util.io.stream.*;
+import org.elasticsearch.util.settings.ImmutableSettings;
+import org.elasticsearch.util.settings.Settings;
+
+import java.io.IOException;
+import java.net.*;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.elasticsearch.cluster.node.DiscoveryNode.*;
+import static org.elasticsearch.util.concurrent.ConcurrentMaps.*;
+import static org.elasticsearch.util.concurrent.DynamicExecutors.*;
+import static org.elasticsearch.util.io.NetworkUtils.*;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class MulticastZenPing extends AbstractLifecycleComponent<ZenPing> implements ZenPing {
+
+ private final String address;
+
+ private final int port;
+
+ private final String group;
+
+ private final int bufferSize;
+
+ private final int ttl;
+
+ private final ThreadPool threadPool;
+
+ private final TransportService transportService;
+
+ private final ClusterName clusterName;
+
+
+ private volatile DiscoveryNodesProvider nodesProvider;
+
+ private volatile Receiver receiver;
+
+ private volatile Thread receiverThread;
+
+ private MulticastSocket multicastSocket;
+
+ private DatagramPacket datagramPacketSend;
+
+ private DatagramPacket datagramPacketReceive;
+
+ private final AtomicInteger pingIdGenerator = new AtomicInteger();
+
+ private final Map<Integer, ConcurrentMap<DiscoveryNode, PingResponse>> receivedResponses = newConcurrentMap();
+
+ private final Object sendMutex = new Object();
+
+ private final Object receiveMutex = new Object();
+
+ public MulticastZenPing(ThreadPool threadPool, TransportService transportService, ClusterName clusterName) {
+ this(ImmutableSettings.Builder.EMPTY_SETTINGS, threadPool, transportService, clusterName);
+ }
+
+ public MulticastZenPing(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterName clusterName) {
+ super(settings);
+ this.threadPool = threadPool;
+ this.transportService = transportService;
+ this.clusterName = clusterName;
+
+ this.address = componentSettings.get("address");
+ this.port = componentSettings.getAsInt("port", 54328);
+ this.group = componentSettings.get("group", "224.2.2.4");
+ this.bufferSize = componentSettings.getAsInt("buffer_size", 2048);
+ this.ttl = componentSettings.getAsInt("ttl", 3);
+
+ this.transportService.registerHandler(PingResponseRequestHandler.ACTION, new PingResponseRequestHandler());
+ }
+
+ @Override public void setNodesProvider(DiscoveryNodesProvider nodesProvider) {
+ if (lifecycle.started()) {
+ throw new ElasticSearchIllegalStateException("Can't set nodes provider when started");
+ }
+ this.nodesProvider = nodesProvider;
+ }
+
+ @Override protected void doStart() throws ElasticSearchException {
+ try {
+ this.datagramPacketReceive = new DatagramPacket(new byte[bufferSize], bufferSize);
+ this.datagramPacketSend = new DatagramPacket(new byte[bufferSize], bufferSize, InetAddress.getByName(group), port);
+ } catch (Exception e) {
+ throw new DiscoveryException("Failed to set datagram packets", e);
+ }
+
+ try {
+ MulticastSocket multicastSocket = new MulticastSocket(null);
+ multicastSocket.setReuseAddress(true);
+ // bind to receive interface
+ multicastSocket.bind(new InetSocketAddress(port));
+ multicastSocket.setTimeToLive(ttl);
+ // set the send interface
+ InetAddress multicastInterface = resolvePublishHostAddress(address, settings);
+ multicastSocket.setInterface(multicastInterface);
+ multicastSocket.setReceiveBufferSize(bufferSize);
+ multicastSocket.setSendBufferSize(bufferSize);
+ multicastSocket.joinGroup(InetAddress.getByName(group));
+ multicastSocket.setSoTimeout(60000);
+
+ this.multicastSocket = multicastSocket;
+ } catch (Exception e) {
+ throw new DiscoveryException("Failed to setup multicast socket", e);
+ }
+
+ this.receiver = new Receiver();
+ this.receiverThread = daemonThreadFactory(settings, "discovery#multicast#received").newThread(receiver);
+ this.receiverThread.start();
+ }
+
+ @Override protected void doStop() throws ElasticSearchException {
+ receiver.stop();
+ receiverThread.interrupt();
+ multicastSocket.close();
+ }
+
+ @Override protected void doClose() throws ElasticSearchException {
+ }
+
+ public PingResponse[] pingAndWait(TimeValue timeout) {
+ final AtomicReference<PingResponse[]> response = new AtomicReference<PingResponse[]>();
+ final CountDownLatch latch = new CountDownLatch(1);
+ ping(new PingListener() {
+ @Override public void onPing(PingResponse[] pings) {
+ response.set(pings);
+ latch.countDown();
+ }
+ }, timeout);
+ try {
+ latch.await();
+ return response.get();
+ } catch (InterruptedException e) {
+ return null;
+ }
+ }
+
+ @Override public void ping(final PingListener listener, final TimeValue timeout) {
+ final int id = pingIdGenerator.incrementAndGet();
+ receivedResponses.put(id, new ConcurrentHashMap<DiscoveryNode, PingResponse>());
+ sendPingRequest(id);
+ // try and send another ping request halfway through (just in case someone woke up during it...)
+ // this can be a good trade-off to nailing the initial lookup or un-delivered messages
+ threadPool.schedule(new Runnable() {
+ @Override public void run() {
+ try {
+ sendPingRequest(id);
+ } catch (Exception e) {
+ logger.debug("[{}] Failed to send second ping request", e, id);
+ }
+ }
+ }, timeout.millis() / 2, TimeUnit.MILLISECONDS);
+ threadPool.schedule(new Runnable() {
+ @Override public void run() {
+ ConcurrentMap<DiscoveryNode, PingResponse> responses = receivedResponses.remove(id);
+ listener.onPing(responses.values().toArray(new PingResponse[responses.size()]));
+ }
+ }, timeout.millis(), TimeUnit.MILLISECONDS);
+ }
+
+ private void sendPingRequest(int id) {
+ synchronized (sendMutex) {
+ try {
+ HandlesStreamOutput out = BytesStreamOutput.Cached.cachedHandles();
+ out.writeInt(id);
+ clusterName.writeTo(out);
+ nodesProvider.nodes().localNode().writeTo(out);
+ datagramPacketSend.setData(((BytesStreamOutput) out.wrappedOut()).copiedByteArray());
+ } catch (IOException e) {
+ receivedResponses.remove(id);
+ throw new ZenPingException("Failed to serialize ping request", e);
+ }
+ try {
+ multicastSocket.send(datagramPacketSend);
+ if (logger.isTraceEnabled()) {
+ logger.trace("[{}] Sending ping request", id);
+ }
+ } catch (IOException e) {
+ receivedResponses.remove(id);
+ throw new ZenPingException("Failed to send ping request over multicast", e);
+ }
+ }
+ }
+
+ private class PingResponseRequestHandler extends BaseTransportRequestHandler<WrappedPingResponse> {
+
+ static final String ACTION = "discovery/zen/multicast";
+
+ @Override public WrappedPingResponse newInstance() {
+ return new WrappedPingResponse();
+ }
+
+ @Override public void messageReceived(WrappedPingResponse request, TransportChannel channel) throws Exception {
+ if (logger.isTraceEnabled()) {
+ logger.trace("[{}] Received {}", request.id, request.pingResponse);
+ }
+ ConcurrentMap<DiscoveryNode, PingResponse> responses = receivedResponses.get(request.id);
+ if (responses == null) {
+ logger.warn("Received ping response with no matching id [{}]", request.id);
+ } else {
+ responses.put(request.pingResponse.target(), request.pingResponse);
+ }
+ channel.sendResponse(VoidStreamable.INSTANCE);
+ }
+ }
+
+ class WrappedPingResponse implements Streamable {
+
+ int id;
+
+ PingResponse pingResponse;
+
+ WrappedPingResponse() {
+ }
+
+ WrappedPingResponse(int id, PingResponse pingResponse) {
+ this.id = id;
+ this.pingResponse = pingResponse;
+ }
+
+ @Override public void readFrom(StreamInput in) throws IOException {
+ id = in.readInt();
+ pingResponse = new PingResponse();
+ pingResponse.readFrom(in);
+ }
+
+ @Override public void writeTo(StreamOutput out) throws IOException {
+ out.writeInt(id);
+ pingResponse.writeTo(out);
+ }
+ }
+
+
+ private class Receiver implements Runnable {
+
+ private volatile boolean running = true;
+
+ public void stop() {
+ running = false;
+ }
+
+ @Override public void run() {
+ while (running) {
+ try {
+ int id;
+ DiscoveryNode requestingNodeX;
+ ClusterName clusterName;
+ synchronized (receiveMutex) {
+ try {
+ multicastSocket.receive(datagramPacketReceive);
+ } catch (SocketTimeoutException ignore) {
+ continue;
+ } catch (Exception e) {
+ if (running) {
+ logger.warn("Failed to receive packet", e);
+ }
+ continue;
+ }
+ try {
+ StreamInput input = HandlesStreamInput.Cached.cached(new BytesStreamInput(datagramPacketReceive.getData(), datagramPacketReceive.getOffset(), datagramPacketReceive.getLength()));
+ id = input.readInt();
+ clusterName = ClusterName.readClusterName(input);
+ requestingNodeX = readNode(input);
+ } catch (Exception e) {
+ logger.warn("Failed to read requesting node from {}", e, datagramPacketReceive.getSocketAddress());
+ continue;
+ }
+ }
+ DiscoveryNodes discoveryNodes = nodesProvider.nodes();
+ final DiscoveryNode requestingNode = requestingNodeX;
+ if (requestingNode.id().equals(discoveryNodes.localNodeId())) {
+ // that's me, ignore
+ continue;
+ }
+ if (!clusterName.equals(MulticastZenPing.this.clusterName)) {
+ // not our cluster, ignore it...
+ continue;
+ }
+ final WrappedPingResponse wrappedPingResponse = new WrappedPingResponse();
+ wrappedPingResponse.id = id;
+ wrappedPingResponse.pingResponse = new PingResponse(discoveryNodes.localNode(), discoveryNodes.masterNode());
+
+ if (logger.isTraceEnabled()) {
+ logger.trace("[{}] Received ping_request from [{}], sending {}", id, requestingNode, wrappedPingResponse.pingResponse);
+ }
+
+ if (!transportService.nodeConnected(requestingNode)) {
+ // do the connect and send on a thread pool
+ threadPool.execute(new Runnable() {
+ @Override public void run() {
+ // connect to the node if possible
+ try {
+ transportService.connectToNode(requestingNode);
+ } catch (Exception e) {
+ logger.warn("Failed to connect to requesting node {}", e, requestingNode);
+ }
+ transportService.sendRequest(requestingNode, PingResponseRequestHandler.ACTION, wrappedPingResponse, new VoidTransportResponseHandler(false) {
+ @Override public void handleException(RemoteTransportException exp) {
+ logger.warn("Failed to receive confirmation on sent ping response to [{}]", exp, requestingNode);
+ }
+ });
+ }
+ });
+ } else {
+ transportService.sendRequest(requestingNode, PingResponseRequestHandler.ACTION, wrappedPingResponse, new VoidTransportResponseHandler(false) {
+ @Override public void handleException(RemoteTransportException exp) {
+ logger.warn("Failed to receive confirmation on sent ping response to [{}]", exp, requestingNode);
+ }
+ });
+ }
+ } catch (Exception e) {
+ logger.warn("Unexpected exception in multicast receiver", e);
+ }
+ }
+ }
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java
new file mode 100644
index 0000000000000..1a5f783511bc6
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateAction.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen.publish;
+
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.discovery.zen.DiscoveryNodesProvider;
+import org.elasticsearch.transport.*;
+import org.elasticsearch.util.component.AbstractComponent;
+import org.elasticsearch.util.io.stream.StreamInput;
+import org.elasticsearch.util.io.stream.StreamOutput;
+import org.elasticsearch.util.io.stream.Streamable;
+import org.elasticsearch.util.io.stream.VoidStreamable;
+import org.elasticsearch.util.settings.Settings;
+
+import java.io.IOException;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class PublishClusterStateAction extends AbstractComponent {
+
+ public static interface NewClusterStateListener {
+ void onNewClusterState(ClusterState clusterState);
+ }
+
+ private final TransportService transportService;
+
+ private final DiscoveryNodesProvider nodesProvider;
+
+ private final NewClusterStateListener listener;
+
+ public PublishClusterStateAction(Settings settings, TransportService transportService, DiscoveryNodesProvider nodesProvider,
+ NewClusterStateListener listener) {
+ super(settings);
+ this.transportService = transportService;
+ this.nodesProvider = nodesProvider;
+ this.listener = listener;
+ transportService.registerHandler(PublishClusterStateRequestHandler.ACTION, new PublishClusterStateRequestHandler());
+ }
+
+ public void close() {
+ transportService.removeHandler(PublishClusterStateRequestHandler.ACTION);
+ }
+
+ public void publish(ClusterState clusterState) {
+ DiscoveryNode localNode = nodesProvider.nodes().localNode();
+ for (final DiscoveryNode node : clusterState.nodes()) {
+ if (node.equals(localNode)) {
+ // no need to send to our self
+ continue;
+ }
+ transportService.sendRequest(node, PublishClusterStateRequestHandler.ACTION, new PublishClusterStateRequest(clusterState), new VoidTransportResponseHandler(false) {
+ @Override public void handleException(RemoteTransportException exp) {
+ logger.warn("Failed to send cluster state to [{}], should be detected as failed soon...", exp, node);
+ }
+ });
+ }
+ }
+
+ private class PublishClusterStateRequest implements Streamable {
+
+ private ClusterState clusterState;
+
+ private PublishClusterStateRequest() {
+ }
+
+ private PublishClusterStateRequest(ClusterState clusterState) {
+ this.clusterState = clusterState;
+ }
+
+ @Override public void readFrom(StreamInput in) throws IOException {
+ clusterState = ClusterState.Builder.readFrom(in, settings, nodesProvider.nodes().localNode());
+ }
+
+ @Override public void writeTo(StreamOutput out) throws IOException {
+ ClusterState.Builder.writeTo(clusterState, out);
+ }
+ }
+
+ private class PublishClusterStateRequestHandler extends BaseTransportRequestHandler<PublishClusterStateRequest> {
+
+ static final String ACTION = "discovery/zen/publish";
+
+ @Override public PublishClusterStateRequest newInstance() {
+ return new PublishClusterStateRequest();
+ }
+
+ @Override public void messageReceived(PublishClusterStateRequest request, TransportChannel channel) throws Exception {
+ listener.onNewClusterState(request.clusterState);
+ channel.sendResponse(VoidStreamable.INSTANCE);
+ }
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java
index 606b33eec2d11..a88ac9ffc8a01 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java
@@ -27,6 +27,7 @@
import org.elasticsearch.util.SizeUnit;
import org.elasticsearch.util.SizeValue;
import org.elasticsearch.util.component.AbstractLifecycleComponent;
+import org.elasticsearch.util.io.NetworkUtils;
import org.elasticsearch.util.settings.Settings;
import org.elasticsearch.util.transport.BoundTransportAddress;
import org.elasticsearch.util.transport.InetSocketTransportAddress;
@@ -49,7 +50,7 @@
import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.util.concurrent.DynamicExecutors.*;
-import static org.elasticsearch.util.io.HostResolver.*;
+import static org.elasticsearch.util.io.NetworkUtils.*;
/**
* @author kimchy (shay.banon)
@@ -103,7 +104,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
this.publishHost = componentSettings.get("publish_host");
this.tcpNoDelay = componentSettings.getAsBoolean("tcp_no_delay", true);
this.tcpKeepAlive = componentSettings.getAsBoolean("tcp_keep_alive", null);
- this.reuseAddress = componentSettings.getAsBoolean("reuse_address", null);
+ this.reuseAddress = componentSettings.getAsBoolean("reuse_address", NetworkUtils.defaultReuseAddress());
this.tcpSendBufferSize = componentSettings.getAsSize("tcp_send_buffer_size", null);
this.tcpReceiveBufferSize = componentSettings.getAsSize("tcp_receive_buffer_size", null);
@@ -189,17 +190,7 @@ public void httpServerAdapter(HttpServerAdapter httpServerAdapter) {
InetSocketAddress boundAddress = (InetSocketAddress) serverChannel.getLocalAddress();
InetSocketAddress publishAddress;
try {
- InetAddress publishAddressX = resolvePublishHostAddress(publishHost, settings);
- if (publishAddressX == null) {
- // if its 0.0.0.0, we can't publish that.., default to the local ip address
- if (boundAddress.getAddress().isAnyLocalAddress()) {
- publishAddress = new InetSocketAddress(resolvePublishHostAddress(publishHost, settings, LOCAL_IP), boundAddress.getPort());
- } else {
- publishAddress = boundAddress;
- }
- } else {
- publishAddress = new InetSocketAddress(publishAddressX, boundAddress.getPort());
- }
+ publishAddress = new InetSocketAddress(resolvePublishHostAddress(publishHost, settings), boundAddress.getPort());
} catch (Exception e) {
throw new BindTransportException("Failed to resolve publish address", e);
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/JsonFilterBuilder.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/JsonFilterBuilder.java
index 7a87faccaacb5..ed0a1a1ebd9ed 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/JsonFilterBuilder.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/json/JsonFilterBuilder.java
@@ -22,7 +22,7 @@
import org.elasticsearch.util.json.ToJson;
/**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
*/
public interface JsonFilterBuilder extends ToJson {
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/JmxService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/JmxService.java
index de5423b6014b6..9a87e5e35a820 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/JmxService.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/JmxService.java
@@ -19,7 +19,7 @@
package org.elasticsearch.jmx;
-import org.elasticsearch.util.io.HostResolver;
+import org.elasticsearch.util.io.NetworkUtils;
import org.elasticsearch.util.logging.ESLogger;
import org.elasticsearch.util.settings.Settings;
import org.elasticsearch.util.transport.PortsRange;
@@ -36,8 +36,6 @@
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicReference;
-import static org.elasticsearch.util.io.HostResolver.*;
-
/**
* @author kimchy (Shay Banon)
*/
@@ -116,7 +114,7 @@ public void connectAndRegister(String nodeDescription) {
connectorServer.start();
// create the publish url
- String publishHost = HostResolver.resolvePublishHostAddress(settings.get("jmx.publishHost"), settings, LOCAL_IP).getHostAddress();
+ String publishHost = NetworkUtils.resolvePublishHostAddress(settings.get("jmx.publishHost"), settings).getHostAddress();
publishUrl = settings.get("jmx.publishUrl", JMXRMI_PUBLISH_URI_PATTERN).replace("{jmx.port}", Integer.toString(portNumber)).replace("{jmx.host}", publishHost);
} catch (Exception e) {
lastException.set(e);
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/node/internal/InternalNode.java b/modules/elasticsearch/src/main/java/org/elasticsearch/node/internal/InternalNode.java
index 271b9b9f9c832..929e16e112ea7 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/node/internal/InternalNode.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/node/internal/InternalNode.java
@@ -46,6 +46,7 @@
import org.elasticsearch.jmx.JmxService;
import org.elasticsearch.monitor.MonitorModule;
import org.elasticsearch.monitor.MonitorService;
+import org.elasticsearch.monitor.jvm.JvmConfig;
import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.PluginsModule;
import org.elasticsearch.plugins.PluginsService;
@@ -101,7 +102,7 @@ public InternalNode(Settings pSettings, boolean loadConfigSettings) throws Elast
Tuple<Settings, Environment> tuple = InternalSettingsPerparer.prepareSettings(pSettings, loadConfigSettings);
ESLogger logger = Loggers.getLogger(Node.class, tuple.v1().get("name"));
- logger.info("{{}}: Initializing ...", Version.full());
+ logger.info("{{}}[{}]: Initializing ...", Version.full(), JvmConfig.jvmConfig().pid());
this.pluginsService = new PluginsService(tuple.v1(), tuple.v2());
this.settings = pluginsService.updatedSettings();
@@ -135,7 +136,7 @@ public InternalNode(Settings pSettings, boolean loadConfigSettings) throws Elast
client = injector.getInstance(Client.class);
- logger.info("{{}}: Initialized", Version.full());
+ logger.info("{{}}[{}]: Initialized", Version.full(), JvmConfig.jvmConfig().pid());
}
@Override public Settings settings() {
@@ -152,7 +153,7 @@ public Node start() {
}
ESLogger logger = Loggers.getLogger(Node.class, settings.get("name"));
- logger.info("{{}}: Starting ...", Version.full());
+ logger.info("{{}}[{}]: Starting ...", Version.full(), JvmConfig.jvmConfig().pid());
for (Class<? extends LifecycleComponent> plugin : pluginsService.services()) {
injector.getInstance(plugin).start();
@@ -175,7 +176,7 @@ public Node start() {
}
injector.getInstance(JmxService.class).connectAndRegister(discoService.nodeDescription());
- logger.info("{{}}: Started", Version.full());
+ logger.info("{{}}[{}]: Started", Version.full(), JvmConfig.jvmConfig().pid());
return this;
}
@@ -185,7 +186,7 @@ public Node start() {
return this;
}
ESLogger logger = Loggers.getLogger(Node.class, settings.get("name"));
- logger.info("{{}}: Stopping ...", Version.full());
+ logger.info("{{}}[{}]: Stopping ...", Version.full(), JvmConfig.jvmConfig().pid());
if (settings.getAsBoolean("http.enabled", true)) {
injector.getInstance(HttpServer.class).stop();
@@ -215,7 +216,7 @@ public Node start() {
Injectors.close(injector);
- logger.info("{{}}: Stopped", Version.full());
+ logger.info("{{}}[{}]: Stopped", Version.full(), JvmConfig.jvmConfig().pid());
return this;
}
@@ -229,7 +230,7 @@ public void close() {
}
ESLogger logger = Loggers.getLogger(Node.class, settings.get("name"));
- logger.info("{{}}: Closing ...", Version.full());
+ logger.info("{{}}[{}]: Closing ...", Version.full(), JvmConfig.jvmConfig().pid());
if (settings.getAsBoolean("http.enabled", true)) {
injector.getInstance(HttpServer.class).close();
@@ -264,7 +265,7 @@ public void close() {
ThreadLocals.clearReferencesThreadLocals();
- logger.info("{{}}: Closed", Version.full());
+ logger.info("{{}}[{}]: Closed", Version.full(), JvmConfig.jvmConfig().pid());
}
public Injector injector() {
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
index d48d445a32ee6..32da99a2c039d 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
@@ -39,5 +39,7 @@ public interface ThreadPool extends ScheduledExecutorService {
Future<?> submit(Runnable task, FutureListener<?> listener);
- public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, TimeValue interval);
+ public ScheduledFuture<?> schedule(Runnable command, TimeValue delay);
+
+ ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, TimeValue interval);
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/support/AbstractThreadPool.java b/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/support/AbstractThreadPool.java
index d0818480c741b..1fb55ae6fbc30 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/support/AbstractThreadPool.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/support/AbstractThreadPool.java
@@ -120,6 +120,10 @@ protected AbstractThreadPool(Settings settings) {
return executorService.submit(new FutureRunnable(task, null, listener));
}
+ @Override public ScheduledFuture<?> schedule(Runnable command, TimeValue delay) {
+ return schedule(command, delay.millis(), TimeUnit.MILLISECONDS);
+ }
+
@Override public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, TimeValue interval) {
return scheduleWithFixedDelay(command, interval.millis(), interval.millis(), TimeUnit.MILLISECONDS);
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/timer/TimerService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/timer/TimerService.java
index 942c5ebb37bec..ac123fffd349c 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/timer/TimerService.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/timer/TimerService.java
@@ -23,6 +23,7 @@
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.util.TimeValue;
import org.elasticsearch.util.component.AbstractComponent;
+import org.elasticsearch.util.settings.ImmutableSettings;
import org.elasticsearch.util.settings.Settings;
import org.elasticsearch.util.timer.HashedWheelTimer;
import org.elasticsearch.util.timer.Timeout;
@@ -50,6 +51,12 @@ public class TimerService extends AbstractComponent {
private final TimeValue tickDuration;
+ private final int ticksPerWheel;
+
+ public TimerService(ThreadPool threadPool) {
+ this(ImmutableSettings.Builder.EMPTY_SETTINGS, threadPool);
+ }
+
@Inject public TimerService(Settings settings, ThreadPool threadPool) {
super(settings);
this.threadPool = threadPool;
@@ -58,8 +65,9 @@ public class TimerService extends AbstractComponent {
this.timeEstimatorFuture = threadPool.scheduleWithFixedDelay(timeEstimator, 50, 50, TimeUnit.MILLISECONDS);
this.tickDuration = componentSettings.getAsTime("tick_duration", timeValueMillis(100));
+ this.ticksPerWheel = componentSettings.getAsInt("ticks_per_wheel", 1024);
- this.timer = new HashedWheelTimer(logger, daemonThreadFactory(settings, "timer"), tickDuration.millis(), TimeUnit.MILLISECONDS);
+ this.timer = new HashedWheelTimer(logger, daemonThreadFactory(settings, "timer"), tickDuration.millis(), TimeUnit.MILLISECONDS, ticksPerWheel);
}
public void close() {
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/ConnectTransportException.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/ConnectTransportException.java
index 6b0abb69ec676..ed1a9db654629 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/ConnectTransportException.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/ConnectTransportException.java
@@ -22,7 +22,7 @@
import org.elasticsearch.cluster.node.DiscoveryNode;
/**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
*/
public class ConnectTransportException extends TransportException {
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/NodeDisconnectedTransportException.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/NodeDisconnectedTransportException.java
new file mode 100644
index 0000000000000..c40e8a0a87d96
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/NodeDisconnectedTransportException.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.transport;
+
+import org.elasticsearch.cluster.node.DiscoveryNode;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class NodeDisconnectedTransportException extends RemoteTransportException {
+
+ public NodeDisconnectedTransportException(DiscoveryNode node, String action) {
+ super(node.name(), node.address(), action, null);
+ }
+
+// @Override public Throwable fillInStackTrace() {
+// return fillStack();
+// }
+}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/ReceiveTimeoutTransportException.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/ReceiveTimeoutTransportException.java
new file mode 100644
index 0000000000000..7737aa6ca8002
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/ReceiveTimeoutTransportException.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.transport;
+
+import org.elasticsearch.cluster.node.DiscoveryNode;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class ReceiveTimeoutTransportException extends RemoteTransportException {
+
+ public ReceiveTimeoutTransportException(DiscoveryNode node, String action) {
+ super(node.name(), node.address(), action, null);
+ }
+
+// @Override public Throwable fillInStackTrace() {
+// return fillStack();
+// }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java
index cb5c6d2e3f398..6bb95e7d7c710 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java
@@ -28,9 +28,9 @@ public interface TransportResponseHandler<T extends Streamable> {
/**
* creates a new instance of the return type from the remote call.
- * called by the infra before deserializing the response.
+ * called by the infra before de-serializing the response.
*
- * @return a new reponse copy.
+ * @return a new response copy.
*/
T newInstance();
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportService.java
index cc517d0c8dfce..0339c6502bfdc 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportService.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportService.java
@@ -23,13 +23,18 @@
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.timer.TimerService;
+import org.elasticsearch.util.TimeValue;
import org.elasticsearch.util.component.AbstractLifecycleComponent;
import org.elasticsearch.util.concurrent.highscalelib.NonBlockingHashMapLong;
import org.elasticsearch.util.io.stream.Streamable;
import org.elasticsearch.util.settings.Settings;
+import org.elasticsearch.util.timer.Timeout;
+import org.elasticsearch.util.timer.TimerTask;
import org.elasticsearch.util.transport.BoundTransportAddress;
import org.elasticsearch.util.transport.TransportAddress;
+import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicLong;
@@ -46,9 +51,11 @@ public class TransportService extends AbstractLifecycleComponent<TransportServic
private final ThreadPool threadPool;
+ private final TimerService timerService;
+
final ConcurrentMap<String, TransportRequestHandler> serverHandlers = newConcurrentMap();
- final NonBlockingHashMapLong<TransportResponseHandler> clientHandlers = new NonBlockingHashMapLong<TransportResponseHandler>();
+ final NonBlockingHashMapLong<RequestHolder> clientHandlers = new NonBlockingHashMapLong<RequestHolder>();
final AtomicLong requestIds = new AtomicLong();
@@ -56,39 +63,20 @@ public class TransportService extends AbstractLifecycleComponent<TransportServic
private boolean throwConnectException = false;
- public TransportService(Transport transport, ThreadPool threadPool) {
- this(EMPTY_SETTINGS, transport, threadPool);
+ public TransportService(Transport transport, ThreadPool threadPool, TimerService timerService) {
+ this(EMPTY_SETTINGS, transport, threadPool, timerService);
}
- @Inject public TransportService(Settings settings, Transport transport, ThreadPool threadPool) {
+ @Inject public TransportService(Settings settings, Transport transport, ThreadPool threadPool, TimerService timerService) {
super(settings);
this.transport = transport;
this.threadPool = threadPool;
+ this.timerService = timerService;
}
@Override protected void doStart() throws ElasticSearchException {
// register us as an adapter for the transport service
- transport.transportServiceAdapter(new TransportServiceAdapter() {
- @Override public TransportRequestHandler handler(String action) {
- return serverHandlers.get(action);
- }
-
- @Override public TransportResponseHandler remove(long requestId) {
- return clientHandlers.remove(requestId);
- }
-
- @Override public void raiseNodeConnected(DiscoveryNode node) {
- for (TransportConnectionListener connectionListener : connectionListeners) {
- connectionListener.onNodeConnected(node);
- }
- }
-
- @Override public void raiseNodeDisconnected(DiscoveryNode node) {
- for (TransportConnectionListener connectionListener : connectionListeners) {
- connectionListener.onNodeDisconnected(node);
- }
- }
- });
+ transport.transportServiceAdapter(new Adapter());
transport.start();
if (transport.boundAddress() != null && logger.isInfoEnabled()) {
logger.info("{}", transport.boundAddress());
@@ -144,16 +132,30 @@ public void throwConnectException(boolean throwConnectException) {
public <T extends Streamable> TransportFuture<T> submitRequest(DiscoveryNode node, String action, Streamable message,
TransportResponseHandler<T> handler) throws TransportException {
+ return submitRequest(node, action, message, null, handler);
+ }
+
+ public <T extends Streamable> TransportFuture<T> submitRequest(DiscoveryNode node, String action, Streamable message,
+ TimeValue timeout, TransportResponseHandler<T> handler) throws TransportException {
PlainTransportFuture<T> futureHandler = new PlainTransportFuture<T>(handler);
- sendRequest(node, action, message, futureHandler);
+ sendRequest(node, action, message, timeout, futureHandler);
return futureHandler;
}
public <T extends Streamable> void sendRequest(final DiscoveryNode node, final String action, final Streamable message,
final TransportResponseHandler<T> handler) throws TransportException {
+ sendRequest(node, action, message, null, handler);
+ }
+
+ public <T extends Streamable> void sendRequest(final DiscoveryNode node, final String action, final Streamable message,
+ final TimeValue timeout, final TransportResponseHandler<T> handler) throws TransportException {
final long requestId = newRequestId();
try {
- clientHandlers.put(requestId, handler);
+ Timeout timeoutX = null;
+ if (timeout != null) {
+ timeoutX = timerService.newTimeout(new TimeoutTimerTask(requestId), timeout);
+ }
+ clientHandlers.put(requestId, new RequestHolder<T>(handler, node, action, timeoutX));
transport.sendRequest(node, requestId, action, message, handler);
} catch (final Exception e) {
// usually happen either because we failed to connect to the node
@@ -183,10 +185,105 @@ public void registerHandler(ActionTransportRequestHandler handler) {
}
public void registerHandler(String action, TransportRequestHandler handler) {
- serverHandlers.put(action, handler);
+ TransportRequestHandler handlerReplaced = serverHandlers.put(action, handler);
+ if (handlerReplaced != null) {
+ logger.warn("Registered two transport handlers for action {}, handlers: {}, {}", action, handler, handlerReplaced);
+ }
}
public void removeHandler(String action) {
serverHandlers.remove(action);
}
+
+ class Adapter implements TransportServiceAdapter {
+ @Override public TransportRequestHandler handler(String action) {
+ return serverHandlers.get(action);
+ }
+
+ @Override public TransportResponseHandler remove(long requestId) {
+ RequestHolder holder = clientHandlers.remove(requestId);
+ if (holder == null) {
+ return null;
+ }
+ if (holder.timeout() != null) {
+ holder.timeout().cancel();
+ }
+ return holder.handler();
+ }
+
+ @Override public void raiseNodeConnected(DiscoveryNode node) {
+ for (TransportConnectionListener connectionListener : connectionListeners) {
+ connectionListener.onNodeConnected(node);
+ }
+ }
+
+ @Override public void raiseNodeDisconnected(DiscoveryNode node) {
+ for (TransportConnectionListener connectionListener : connectionListeners) {
+ connectionListener.onNodeDisconnected(node);
+ }
+ // node got disconnected, raise disconnection on possible ongoing handlers
+ for (Map.Entry<Long, RequestHolder> entry : clientHandlers.entrySet()) {
+ RequestHolder holder = entry.getValue();
+ if (holder.node().equals(node)) {
+ holder = clientHandlers.remove(entry.getKey());
+ if (holder != null) {
+ holder.handler().handleException(new NodeDisconnectedTransportException(node, holder.action()));
+ }
+ }
+ }
+ }
+ }
+
+ class TimeoutTimerTask implements TimerTask {
+
+ private final long requestId;
+
+ TimeoutTimerTask(long requestId) {
+ this.requestId = requestId;
+ }
+
+ @Override public void run(Timeout timeout) throws Exception {
+ if (timeout.isCancelled()) {
+ return;
+ }
+ RequestHolder holder = clientHandlers.remove(requestId);
+ if (holder != null) {
+ holder.handler().handleException(new ReceiveTimeoutTransportException(holder.node(), holder.action()));
+ }
+ }
+ }
+
+ static class RequestHolder<T extends Streamable> {
+
+ private final TransportResponseHandler<T> handler;
+
+ private final DiscoveryNode node;
+
+ private final String action;
+
+ private final Timeout timeout;
+
+ RequestHolder(TransportResponseHandler<T> handler, DiscoveryNode node, String action, Timeout timeout) {
+ this.handler = handler;
+ this.node = node;
+ this.action = action;
+ this.timeout = timeout;
+ }
+
+ public TransportResponseHandler<T> handler() {
+ return handler;
+ }
+
+ public DiscoveryNode node() {
+ return this.node;
+ }
+
+ public String action() {
+ return this.action;
+ }
+
+ public Timeout timeout() {
+ return timeout;
+ }
+ }
}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/local/LocalTransport.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/local/LocalTransport.java
index a00ff6974fc65..7ab21d47c757c 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/local/LocalTransport.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/local/LocalTransport.java
@@ -193,13 +193,22 @@ void messageReceived(byte[] data, String action, LocalTransport sourceTransport,
private void handleRequest(StreamInput stream, long requestId, LocalTransport sourceTransport) throws Exception {
final String action = stream.readUTF();
final LocalTransportChannel transportChannel = new LocalTransportChannel(this, sourceTransport, action, requestId);
- final TransportRequestHandler handler = transportServiceAdapter.handler(action);
- if (handler == null) {
- throw new ActionNotFoundTransportException("Action [" + action + "] not found");
+ try {
+ final TransportRequestHandler handler = transportServiceAdapter.handler(action);
+ if (handler == null) {
+ throw new ActionNotFoundTransportException("Action [" + action + "] not found");
+ }
+ final Streamable streamable = handler.newInstance();
+ streamable.readFrom(stream);
+ handler.messageReceived(streamable, transportChannel);
+ } catch (Exception e) {
+ try {
+ transportChannel.sendResponse(e);
+ } catch (IOException e1) {
+ logger.warn("Failed to send error message back to client for action [" + action + "]", e);
+ logger.warn("Actual Exception", e1);
+ }
}
- final Streamable streamable = handler.newInstance();
- streamable.readFrom(stream);
- handler.messageReceived(streamable, transportChannel);
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java
index 942b25122584c..4741106f840ce 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java
@@ -19,7 +19,6 @@
package org.elasticsearch.transport.netty;
-import com.google.common.collect.Lists;
import com.google.inject.Inject;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalStateException;
@@ -29,6 +28,7 @@
import org.elasticsearch.util.SizeValue;
import org.elasticsearch.util.TimeValue;
import org.elasticsearch.util.component.AbstractLifecycleComponent;
+import org.elasticsearch.util.io.NetworkUtils;
import org.elasticsearch.util.io.stream.BytesStreamOutput;
import org.elasticsearch.util.io.stream.HandlesStreamOutput;
import org.elasticsearch.util.io.stream.Streamable;
@@ -51,7 +51,6 @@
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
-import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
@@ -61,11 +60,12 @@
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
+import static com.google.common.collect.Lists.*;
import static org.elasticsearch.transport.Transport.Helper.*;
import static org.elasticsearch.util.TimeValue.*;
import static org.elasticsearch.util.concurrent.ConcurrentMaps.*;
import static org.elasticsearch.util.concurrent.DynamicExecutors.*;
-import static org.elasticsearch.util.io.HostResolver.*;
+import static org.elasticsearch.util.io.NetworkUtils.*;
import static org.elasticsearch.util.settings.ImmutableSettings.Builder.*;
import static org.elasticsearch.util.transport.NetworkExceptionHelper.*;
@@ -94,8 +94,6 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
final int connectionsPerNode;
- final int connectRetries;
-
final Boolean tcpNoDelay;
final Boolean tcpKeepAlive;
@@ -138,10 +136,9 @@ public NettyTransport(ThreadPool threadPool) {
this.connectionsPerNode = componentSettings.getAsInt("connections_per_node", 5);
this.publishHost = componentSettings.get("publish_host");
this.connectTimeout = componentSettings.getAsTime("connect_timeout", timeValueSeconds(1));
- this.connectRetries = componentSettings.getAsInt("connect_retries", 2);
this.tcpNoDelay = componentSettings.getAsBoolean("tcp_no_delay", true);
this.tcpKeepAlive = componentSettings.getAsBoolean("tcp_keep_alive", null);
- this.reuseAddress = componentSettings.getAsBoolean("reuse_address", null);
+ this.reuseAddress = componentSettings.getAsBoolean("reuse_address", NetworkUtils.defaultReuseAddress());
this.tcpSendBufferSize = componentSettings.getAsSize("tcp_send_buffer_size", null);
this.tcpReceiveBufferSize = componentSettings.getAsSize("tcp_receive_buffer_size", null);
}
@@ -260,17 +257,7 @@ ThreadPool threadPool() {
InetSocketAddress boundAddress = (InetSocketAddress) serverChannel.getLocalAddress();
InetSocketAddress publishAddress;
try {
- InetAddress publishAddressX = resolvePublishHostAddress(publishHost, settings);
- if (publishAddressX == null) {
- // if its 0.0.0.0, we can't publish that.., default to the local ip address
- if (boundAddress.getAddress().isAnyLocalAddress()) {
- publishAddress = new InetSocketAddress(resolvePublishHostAddress(publishHost, settings, LOCAL_IP), boundAddress.getPort());
- } else {
- publishAddress = boundAddress;
- }
- } else {
- publishAddress = new InetSocketAddress(publishAddressX, boundAddress.getPort());
- }
+ publishAddress = new InetSocketAddress(resolvePublishHostAddress(publishHost, settings), boundAddress.getPort());
} catch (Exception e) {
throw new BindTransportException("Failed to resolve publish address", e);
}
@@ -412,59 +399,35 @@ TransportAddress wrapAddress(SocketAddress socketAddress) {
if (nodeConnections != null) {
return;
}
- // build connection(s) to the node
- ArrayList<Channel> channels = new ArrayList<Channel>();
- Throwable lastConnectException = null;
+ List<ChannelFuture> connectFutures = newArrayList();
for (int connectionIndex = 0; connectionIndex < connectionsPerNode; connectionIndex++) {
- for (int i = 1; i <= connectRetries; i++) {
- if (!lifecycle.started()) {
- for (Channel channel1 : channels) {
- channel1.close().awaitUninterruptibly();
- }
- throw new ConnectTransportException(node, "Can't connect when the transport is stopped");
- }
- InetSocketAddress address = ((InetSocketTransportAddress) node.address()).address();
- ChannelFuture channelFuture = clientBootstrap.connect(address);
- channelFuture.awaitUninterruptibly((long) (connectTimeout.millis() * 1.25));
- if (!channelFuture.isSuccess()) {
- // we failed to connect, check if we need to bail or retry
- if (i == connectRetries && connectionIndex == 0) {
- lastConnectException = channelFuture.getCause();
- if (connectionIndex == 0) {
- throw new ConnectTransportException(node, "connectTimeout[" + connectTimeout + "], connectRetries[" + connectRetries + "]", lastConnectException);
- } else {
- // break out of the retry loop, try another connection
- break;
- }
- } else {
- logger.trace("Retry #[" + i + "], connect to [" + node + "]");
- try {
- channelFuture.getChannel().close();
- } catch (Exception e) {
- // ignore
- }
- continue;
- }
- }
- // we got a connection, add it to our connections
- Channel channel = channelFuture.getChannel();
- if (!lifecycle.started()) {
- channel.close();
- for (Channel channel1 : channels) {
- channel1.close().awaitUninterruptibly();
- }
- throw new ConnectTransportException(node, "Can't connect when the transport is stopped");
+ InetSocketAddress address = ((InetSocketTransportAddress) node.address()).address();
+ connectFutures.add(clientBootstrap.connect(address));
+
+ }
+ List<Channel> channels = newArrayList();
+ Throwable lastConnectException = null;
+ for (ChannelFuture connectFuture : connectFutures) {
+ if (!lifecycle.started()) {
+ for (Channel channel : channels) {
+ channel.close().awaitUninterruptibly();
}
+ throw new ConnectTransportException(node, "Can't connect when the transport is stopped");
+ }
+ connectFuture.awaitUninterruptibly((long) (connectTimeout.millis() * 1.25));
+ if (!connectFuture.isSuccess()) {
+ lastConnectException = connectFuture.getCause();
+ } else {
+ Channel channel = connectFuture.getChannel();
channel.getCloseFuture().addListener(new ChannelCloseListener(node.id()));
channels.add(channel);
- break;
}
}
if (channels.isEmpty()) {
if (lastConnectException != null) {
- throw new ConnectTransportException(node, "connectTimeout[" + connectTimeout + "], connectRetries[" + connectRetries + "]", lastConnectException);
+ throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", lastConnectException);
}
- throw new ConnectTransportException(node, "connectTimeout[" + connectTimeout + "], connectRetries[" + connectRetries + "], reason unknown");
+ throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "], reason unknown");
}
if (logger.isDebugEnabled()) {
logger.debug("Connected to node[{}], number_of_connections[{}]", node, channels.size());
@@ -516,7 +479,7 @@ private Channel channel() {
}
private void channelClosed(Channel closedChannel) {
- List<Channel> updated = Lists.newArrayList();
+ List<Channel> updated = newArrayList();
for (Channel channel : channels) {
if (!channel.getId().equals(closedChannel.getId())) {
updated.add(channel);
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransportManagement.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransportManagement.java
index edf27c3f3d060..e60510d00207c 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransportManagement.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransportManagement.java
@@ -65,11 +65,6 @@ public String getConnectTimeout() {
return transport.connectTimeout.toString();
}
- @ManagedAttribute(description = "Connect retries")
- public int getConnectRetries() {
- return transport.connectRetries;
- }
-
@ManagedAttribute(description = "TcpNoDelay")
public Boolean getTcpNoDelay() {
return transport.tcpNoDelay;
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/io/HostResolver.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/io/HostResolver.java
deleted file mode 100644
index 0a536ac4fb44b..0000000000000
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/util/io/HostResolver.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.util.io;
-
-import org.elasticsearch.util.settings.Settings;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.NetworkInterface;
-import java.net.SocketException;
-import java.net.UnknownHostException;
-import java.util.Enumeration;
-
-/**
- * @author kimchy (Shay Banon)
- */
-public abstract class HostResolver {
-
- public static final String GLOBAL_NETWORK_BINDHOST_SETTING = "network.bind_host";
- public static final String GLOBAL_NETWORK_PUBLISHHOST_SETTING = "network.publish_host";
-
- public static final String LOCAL_IP = "#local:ip#";
- public static final String LOCAL_HOST = "#local:host#";
- public static final String LOCAL_CANONICALHOST = "#local:canonicalhost#";
-
- public static boolean isIPv4() {
- return System.getProperty("java.net.preferIPv4Stack") != null && System.getProperty("java.net.preferIPv4Stack").equals("true");
- }
-
- public static InetAddress resolveBindHostAddress(String bindHost, Settings settings) throws IOException {
- return resolveBindHostAddress(bindHost, settings, null);
- }
-
- public static InetAddress resolveBindHostAddress(String bindHost, Settings settings, String defaultValue2) throws IOException {
- return resolveInetAddress(bindHost, settings.get(GLOBAL_NETWORK_BINDHOST_SETTING), defaultValue2);
- }
-
- public static InetAddress resolvePublishHostAddress(String publishHost, Settings settings) throws IOException {
- return resolvePublishHostAddress(publishHost, settings, null);
- }
-
- public static InetAddress resolvePublishHostAddress(String publishHost, Settings settings, String defaultValue2) throws IOException {
- return resolveInetAddress(publishHost, settings.get(GLOBAL_NETWORK_PUBLISHHOST_SETTING), defaultValue2);
- }
-
- public static InetAddress resolveInetAddress(String host, String defaultValue1, String defaultValue2) throws UnknownHostException, IOException {
- String resolvedHost = resolveHost(host, defaultValue1, defaultValue2);
- if (resolvedHost == null) {
- return null;
- }
- return InetAddress.getByName(resolvedHost);
- }
-
- public static String resolveHost(String host, String defaultValue1, String defaultValue2) throws UnknownHostException, IOException {
- if (host == null) {
- host = defaultValue1;
- }
- if (host == null) {
- host = defaultValue2;
- }
- if (host == null) {
- return null;
- }
- if (host.startsWith("#") && host.endsWith("#")) {
- host = host.substring(1, host.length() - 1);
- if (host.equals("local:ip")) {
- return InetAddress.getLocalHost().getHostAddress();
- } else if (host.equalsIgnoreCase("local:host")) {
- return InetAddress.getLocalHost().getHostName();
- } else if (host.equalsIgnoreCase("local:canonicalhost")) {
- return InetAddress.getLocalHost().getCanonicalHostName();
- } else {
- String name = host.substring(0, host.indexOf(':'));
- String type = host.substring(host.indexOf(':') + 1);
- Enumeration<NetworkInterface> niEnum;
- try {
- niEnum = NetworkInterface.getNetworkInterfaces();
- } catch (SocketException e) {
- throw new IOException("Failed to get network interfaces", e);
- }
- while (niEnum.hasMoreElements()) {
- NetworkInterface ni = niEnum.nextElement();
- if (name.equals(ni.getName()) || name.equals(ni.getDisplayName())) {
- Enumeration<InetAddress> inetEnum = ni.getInetAddresses();
- while (inetEnum.hasMoreElements()) {
- InetAddress addr = inetEnum.nextElement();
- if (addr.getHostAddress().equals("127.0.0.1")) {
- // ignore local host
- continue;
- }
- if (addr.getHostAddress().indexOf(".") == -1) {
- // ignore address like 0:0:0:0:0:0:0:1
- continue;
- }
- if ("host".equalsIgnoreCase(type)) {
- return addr.getHostName();
- } else if ("canonicalhost".equalsIgnoreCase(type)) {
- return addr.getCanonicalHostName();
- } else {
- return addr.getHostAddress();
- }
- }
- }
- }
- }
- throw new IOException("Failed to find network interface for [" + host + "]");
- }
- InetAddress inetAddress = java.net.InetAddress.getByName(host);
- String hostAddress = inetAddress.getHostAddress();
- String hostName = inetAddress.getHostName();
- String canonicalHostName = inetAddress.getCanonicalHostName();
- if (host.equalsIgnoreCase(hostAddress)) {
- return hostAddress;
- } else if (host.equalsIgnoreCase(canonicalHostName)) {
- return canonicalHostName;
- } else {
- return hostName; //resolve property into actual lower/upper case
- }
- }
-
- private HostResolver() {
-
- }
-}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/io/NetworkUtils.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/io/NetworkUtils.java
new file mode 100644
index 0000000000000..a21be4c5cdb7c
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/io/NetworkUtils.java
@@ -0,0 +1,286 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.util.io;
+
+import org.elasticsearch.util.OsUtils;
+import org.elasticsearch.util.logging.ESLogger;
+import org.elasticsearch.util.logging.Loggers;
+import org.elasticsearch.util.settings.Settings;
+
+import java.io.IOException;
+import java.net.*;
+import java.util.*;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public abstract class NetworkUtils {
+
+ private final static ESLogger logger = Loggers.getLogger(NetworkUtils.class);
+
+ public static enum StackType {
+ IPv4, IPv6, Unknown
+ }
+
+ public static final String IPv4_SETTING = "java.net.preferIPv4Stack";
+ public static final String IPv6_SETTING = "java.net.preferIPv6Addresses";
+
+ public static final String NON_LOOPBACK_ADDRESS = "non_loopback_address";
+ public static final String LOCAL = "#local#";
+
+ public static final String GLOBAL_NETWORK_BINDHOST_SETTING = "network.bind_host";
+ public static final String GLOBAL_NETWORK_PUBLISHHOST_SETTING = "network.publish_host";
+
+ private final static InetAddress localAddress;
+
+ static {
+ InetAddress localAddressX = null;
+ try {
+ localAddressX = InetAddress.getLocalHost();
+ } catch (UnknownHostException e) {
+ logger.warn("Failed to find local host", e);
+ }
+ localAddress = localAddressX;
+ }
+
+ public static Boolean defaultReuseAddress() {
+ return OsUtils.WINDOWS ? null : true;
+ }
+
+ public static boolean isIPv4() {
+ return System.getProperty("java.net.preferIPv4Stack") != null && System.getProperty("java.net.preferIPv4Stack").equals("true");
+ }
+
+ public static InetAddress resolveBindHostAddress(String bindHost, Settings settings) throws IOException {
+ return resolveBindHostAddress(bindHost, settings, null);
+ }
+
+ public static InetAddress resolveBindHostAddress(String bindHost, Settings settings, String defaultValue2) throws IOException {
+ return resolveInetAddress(bindHost, settings.get(GLOBAL_NETWORK_BINDHOST_SETTING), defaultValue2);
+ }
+
+ public static InetAddress resolvePublishHostAddress(String publishHost, Settings settings) throws IOException {
+ InetAddress address = resolvePublishHostAddress(publishHost, settings, null);
+        // verify that it's not a local address
+ if (address == null || address.isAnyLocalAddress()) {
+ address = localAddress;
+ }
+ return address;
+ }
+
+ public static InetAddress resolvePublishHostAddress(String publishHost, Settings settings, String defaultValue2) throws IOException {
+ return resolveInetAddress(publishHost, settings.get(GLOBAL_NETWORK_PUBLISHHOST_SETTING), defaultValue2);
+ }
+
+ public static InetAddress resolveInetAddress(String host, String defaultValue1, String defaultValue2) throws UnknownHostException, IOException {
+ if (host == null) {
+ host = defaultValue1;
+ }
+ if (host == null) {
+ host = defaultValue2;
+ }
+ if (host == null) {
+ return null;
+ }
+ if (host.startsWith("#") && host.endsWith("#")) {
+ host = host.substring(1, host.length() - 1);
+ if (host.equals("local")) {
+ return localAddress;
+ } else {
+ Collection<NetworkInterface> allInterfs = getAllAvailableInterfaces();
+ for (NetworkInterface ni : allInterfs) {
+ if (!ni.isUp() || ni.isLoopback()) {
+ continue;
+ }
+ if (host.equals(ni.getName()) || host.equals(ni.getDisplayName())) {
+ return getFirstNonLoopbackAddress(ni, getIpStackType());
+ }
+ }
+ }
+ throw new IOException("Failed to find network interface for [" + host + "]");
+ }
+ return InetAddress.getByName(host);
+ }
+
+ public static InetAddress getIPv4Localhost() throws UnknownHostException {
+ return getLocalhost(StackType.IPv4);
+ }
+
+ public static InetAddress getIPv6Localhost() throws UnknownHostException {
+ return getLocalhost(StackType.IPv6);
+ }
+
+ public static InetAddress getLocalhost(StackType ip_version) throws UnknownHostException {
+ if (ip_version == StackType.IPv4)
+ return InetAddress.getByName("127.0.0.1");
+ else
+ return InetAddress.getByName("::1");
+ }
+
+
+ /**
+ * Returns the first non-loopback address on any interface on the current host.
+ *
+     * @param ip_version Constraint on IP version of the address to be returned, IPv4 or IPv6
+ */
+ public static InetAddress getFirstNonLoopbackAddress(StackType ip_version) throws SocketException {
+ InetAddress address = null;
+
+ Enumeration intfs = NetworkInterface.getNetworkInterfaces();
+ while (intfs.hasMoreElements()) {
+ NetworkInterface intf = (NetworkInterface) intfs.nextElement();
+ if (!intf.isUp() || intf.isLoopback())
+ continue;
+ address = getFirstNonLoopbackAddress(intf, ip_version);
+ if (address != null) {
+ return address;
+ }
+ }
+ return null;
+ }
+
+
+ /**
+ * Returns the first non-loopback address on the given interface on the current host.
+ *
+ * @param intf the interface to be checked
+     * @param ipVersion Constraint on IP version of the address to be returned, IPv4 or IPv6
+ */
+ public static InetAddress getFirstNonLoopbackAddress(NetworkInterface intf, StackType ipVersion) throws SocketException {
+ if (intf == null)
+ throw new IllegalArgumentException("Network interface pointer is null");
+
+ for (Enumeration addresses = intf.getInetAddresses(); addresses.hasMoreElements();) {
+ InetAddress address = (InetAddress) addresses.nextElement();
+ if (!address.isLoopbackAddress()) {
+ if ((address instanceof Inet4Address && ipVersion == StackType.IPv4) ||
+ (address instanceof Inet6Address && ipVersion == StackType.IPv6))
+ return address;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * A function to check if an interface supports an IP version (i.e has addresses
+ * defined for that IP version).
+ *
+ * @param intf
+ * @return
+ */
+ public static boolean interfaceHasIPAddresses(NetworkInterface intf, StackType ipVersion) throws SocketException, UnknownHostException {
+ boolean supportsVersion = false;
+ if (intf != null) {
+ // get all the InetAddresses defined on the interface
+ Enumeration addresses = intf.getInetAddresses();
+ while (addresses != null && addresses.hasMoreElements()) {
+ // get the next InetAddress for the current interface
+ InetAddress address = (InetAddress) addresses.nextElement();
+
+ // check if we find an address of correct version
+ if ((address instanceof Inet4Address && (ipVersion == StackType.IPv4)) ||
+ (address instanceof Inet6Address && (ipVersion == StackType.IPv6))) {
+ supportsVersion = true;
+ break;
+ }
+ }
+ } else {
+ throw new UnknownHostException("network interface " + intf + " not found");
+ }
+ return supportsVersion;
+ }
+
+ /**
+ * Tries to determine the type of IP stack from the available interfaces and their addresses and from the
+ * system properties (java.net.preferIPv4Stack and java.net.preferIPv6Addresses)
+ *
+ * @return StackType.IPv4 for an IPv4 only stack, StackYTypeIPv6 for an IPv6 only stack, and StackType.Unknown
+ * if the type cannot be detected
+ */
+ public static StackType getIpStackType() {
+ boolean isIPv4StackAvailable = isStackAvailable(true);
+ boolean isIPv6StackAvailable = isStackAvailable(false);
+
+ // if only IPv4 stack available
+ if (isIPv4StackAvailable && !isIPv6StackAvailable) {
+ return StackType.IPv4;
+ }
+ // if only IPv6 stack available
+ else if (isIPv6StackAvailable && !isIPv4StackAvailable) {
+ return StackType.IPv6;
+ }
+ // if dual stack
+ else if (isIPv4StackAvailable && isIPv6StackAvailable) {
+ // get the System property which records user preference for a stack on a dual stack machine
+ if (Boolean.getBoolean(IPv4_SETTING)) // has preference over java.net.preferIPv6Addresses
+ return StackType.IPv4;
+ if (Boolean.getBoolean(IPv6_SETTING))
+ return StackType.IPv6;
+ return StackType.IPv6;
+ }
+ return StackType.Unknown;
+ }
+
+
+ public static boolean isStackAvailable(boolean ipv4) {
+ Collection<InetAddress> allAddrs = getAllAvailableAddresses();
+ for (InetAddress addr : allAddrs)
+ if (ipv4 && addr instanceof Inet4Address || (!ipv4 && addr instanceof Inet6Address))
+ return true;
+ return false;
+ }
+
+
+ public static List<NetworkInterface> getAllAvailableInterfaces() throws SocketException {
+ List<NetworkInterface> allInterfaces = new ArrayList<NetworkInterface>(10);
+ NetworkInterface intf;
+ for (Enumeration en = NetworkInterface.getNetworkInterfaces(); en.hasMoreElements();) {
+ intf = (NetworkInterface) en.nextElement();
+ allInterfaces.add(intf);
+ }
+ return allInterfaces;
+ }
+
+ public static Collection<InetAddress> getAllAvailableAddresses() {
+ Set<InetAddress> retval = new HashSet<InetAddress>();
+ Enumeration en;
+
+ try {
+ en = NetworkInterface.getNetworkInterfaces();
+ if (en == null)
+ return retval;
+ while (en.hasMoreElements()) {
+ NetworkInterface intf = (NetworkInterface) en.nextElement();
+ Enumeration<InetAddress> addrs = intf.getInetAddresses();
+ while (addrs.hasMoreElements())
+ retval.add(addrs.nextElement());
+ }
+ } catch (SocketException e) {
+ logger.warn("Failed to derive all available interfaces", e);
+ }
+
+ return retval;
+ }
+
+
+ private NetworkUtils() {
+
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/io/stream/BytesStreamInput.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/io/stream/BytesStreamInput.java
index e040b157ab04d..2946c02135568 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/util/io/stream/BytesStreamInput.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/io/stream/BytesStreamInput.java
@@ -36,9 +36,13 @@ public class BytesStreamInput extends StreamInput {
protected int count;
public BytesStreamInput(byte buf[]) {
+ this(buf, 0, buf.length);
+ }
+
+ public BytesStreamInput(byte buf[], int position, int count) {
this.buf = buf;
- this.pos = 0;
- this.count = buf.length;
+ this.pos = position;
+ this.count = count;
}
@Override public byte readByte() throws IOException {
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/timer/TimerTask.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/timer/TimerTask.java
index 7eeb8f092488c..0fa41d86cd99d 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/util/timer/TimerTask.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/timer/TimerTask.java
@@ -23,7 +23,7 @@
* A task which is executed after the delay specified with
* {@link Timer#newTimeout(TimerTask, long, java.util.concurrent.TimeUnit)}.
*
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
*/
public interface TimerTask {
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/discovery/zen/ping/multicast/MulticastZenPingTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/discovery/zen/ping/multicast/MulticastZenPingTests.java
new file mode 100644
index 0000000000000..19c39af0b6fc1
--- /dev/null
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/discovery/zen/ping/multicast/MulticastZenPingTests.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.discovery.zen.ping.multicast;
+
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.discovery.zen.DiscoveryNodesProvider;
+import org.elasticsearch.discovery.zen.ping.ZenPing;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.threadpool.cached.CachedThreadPool;
+import org.elasticsearch.timer.TimerService;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.transport.local.LocalTransport;
+import org.elasticsearch.util.TimeValue;
+import org.testng.annotations.Test;
+
+import static org.hamcrest.MatcherAssert.*;
+import static org.hamcrest.Matchers.*;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+@Test
+public class MulticastZenPingTests {
+
+ @Test public void testSimplePings() {
+ ThreadPool threadPool = new CachedThreadPool();
+ TimerService timerService = new TimerService(threadPool);
+ ClusterName clusterName = new ClusterName("test");
+ final TransportService transportServiceA = new TransportService(new LocalTransport(threadPool), threadPool, timerService).start();
+ final DiscoveryNode nodeA = new DiscoveryNode("A", transportServiceA.boundAddress().publishAddress());
+
+ final TransportService transportServiceB = new TransportService(new LocalTransport(threadPool), threadPool, timerService).start();
+ final DiscoveryNode nodeB = new DiscoveryNode("B", transportServiceA.boundAddress().publishAddress());
+
+ MulticastZenPing zenPingA = (MulticastZenPing) new MulticastZenPing(threadPool, transportServiceA, clusterName);
+ zenPingA.setNodesProvider(new DiscoveryNodesProvider() {
+ @Override public DiscoveryNodes nodes() {
+ return DiscoveryNodes.newNodesBuilder().put(nodeA).localNodeId("A").build();
+ }
+ });
+ zenPingA.start();
+
+ MulticastZenPing zenPingB = (MulticastZenPing) new MulticastZenPing(threadPool, transportServiceB, clusterName);
+ zenPingB.setNodesProvider(new DiscoveryNodesProvider() {
+ @Override public DiscoveryNodes nodes() {
+ return DiscoveryNodes.newNodesBuilder().put(nodeB).localNodeId("B").build();
+ }
+ });
+ zenPingB.start();
+
+ try {
+ ZenPing.PingResponse[] pingResponses = zenPingA.pingAndWait(TimeValue.timeValueSeconds(1));
+ assertThat(pingResponses.length, equalTo(1));
+ assertThat(pingResponses[0].target().id(), equalTo("B"));
+ } finally {
+ zenPingA.close();
+ zenPingB.close();
+ transportServiceA.close();
+ transportServiceB.close();
+ threadPool.shutdown();
+ }
+ }
+}
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTests.java
index 19165542bc572..134da4a976180 100644
--- a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTests.java
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTests.java
@@ -21,7 +21,9 @@
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.threadpool.scaling.ScalingThreadPool;
+import org.elasticsearch.threadpool.cached.CachedThreadPool;
+import org.elasticsearch.timer.TimerService;
+import org.elasticsearch.util.TimeValue;
import org.elasticsearch.util.io.stream.StreamInput;
import org.elasticsearch.util.io.stream.StreamOutput;
import org.elasticsearch.util.io.stream.Streamable;
@@ -42,6 +44,7 @@
public abstract class AbstractSimpleTransportTests {
protected ThreadPool threadPool;
+ protected TimerService timerService;
protected TransportService serviceA;
protected TransportService serviceB;
@@ -49,7 +52,8 @@ public abstract class AbstractSimpleTransportTests {
protected DiscoveryNode serviceBNode;
@BeforeMethod public void setUp() {
- threadPool = new ScalingThreadPool();
+ threadPool = new CachedThreadPool();
+ timerService = new TimerService(threadPool);
build();
serviceA.connectToNode(serviceBNode);
serviceB.connectToNode(serviceANode);
@@ -106,6 +110,8 @@ public abstract class AbstractSimpleTransportTests {
assertThat(e.getMessage(), false, equalTo(true));
}
+ serviceA.removeHandler("sayHello");
+
System.out.println("after ...");
}
@@ -144,6 +150,8 @@ public abstract class AbstractSimpleTransportTests {
assertThat("bad message !!!", equalTo(e.getCause().getMessage()));
}
+ serviceA.removeHandler("sayHelloException");
+
System.out.println("after ...");
}
@@ -162,7 +170,53 @@ public void testDisconnectListener() throws Exception {
};
serviceA.addConnectionListener(disconnectListener);
serviceB.close();
- assertThat(latch.await(1, TimeUnit.SECONDS), equalTo(true));
+ assertThat(latch.await(5, TimeUnit.SECONDS), equalTo(true));
+ }
+
+ @Test public void testTimeoutSendException() throws Exception {
+ serviceA.registerHandler("sayHelloTimeout", new BaseTransportRequestHandler<StringMessage>() {
+ @Override public StringMessage newInstance() {
+ return new StringMessage();
+ }
+
+ @Override public void messageReceived(StringMessage request, TransportChannel channel) {
+ System.out.println("got message: " + request.message);
+ assertThat("moshe", equalTo(request.message));
+ // don't send back a response
+// try {
+// channel.sendResponse(new StringMessage("hello " + request.message));
+// } catch (IOException e) {
+// e.printStackTrace();
+// assertThat(e.getMessage(), false, equalTo(true));
+// }
+ }
+ });
+
+ TransportFuture<StringMessage> res = serviceB.submitRequest(serviceANode, "sayHelloTimeout",
+ new StringMessage("moshe"), TimeValue.timeValueMillis(100), new BaseTransportResponseHandler<StringMessage>() {
+ @Override public StringMessage newInstance() {
+ return new StringMessage();
+ }
+
+ @Override public void handleResponse(StringMessage response) {
+ assertThat("got response instead of exception", false, equalTo(true));
+ }
+
+ @Override public void handleException(RemoteTransportException exp) {
+ assertThat(exp, instanceOf(ReceiveTimeoutTransportException.class));
+ }
+ });
+
+ try {
+ StringMessage message = res.txGet();
+ assertThat("exception should be thrown", false, equalTo(true));
+ } catch (Exception e) {
+ assertThat(e, instanceOf(ReceiveTimeoutTransportException.class));
+ }
+
+ serviceA.removeHandler("sayHelloTimeout");
+
+ System.out.println("after ...");
}
private class StringMessage implements Streamable {
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java
index 569619f78f51e..d349901a12595 100644
--- a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java
@@ -28,10 +28,10 @@
public class SimpleLocalTransportTests extends AbstractSimpleTransportTests {
@Override protected void build() {
- serviceA = new TransportService(new LocalTransport(threadPool), threadPool).start();
+ serviceA = new TransportService(new LocalTransport(threadPool), threadPool, timerService).start();
serviceANode = new DiscoveryNode("A", serviceA.boundAddress().publishAddress());
- serviceB = new TransportService(new LocalTransport(threadPool), threadPool).start();
+ serviceB = new TransportService(new LocalTransport(threadPool), threadPool, timerService).start();
serviceBNode = new DiscoveryNode("B", serviceB.boundAddress().publishAddress());
}
}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java
index 0c3c4ea65317d..abee21b23890a 100644
--- a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java
@@ -30,11 +30,10 @@
public class SimpleNettyTransportTests extends AbstractSimpleTransportTests {
@Override protected void build() {
- serviceA = new TransportService(settingsBuilder().put("name", "A").build(), new NettyTransport(settingsBuilder().put("name", "A").build(), threadPool), threadPool).start();
+ serviceA = new TransportService(settingsBuilder().put("name", "A").build(), new NettyTransport(settingsBuilder().put("name", "A").build(), threadPool), threadPool, timerService).start();
serviceANode = new DiscoveryNode("A", serviceA.boundAddress().publishAddress());
- serviceB = new TransportService(settingsBuilder().put("name", "B").build(), new NettyTransport(settingsBuilder().put("name", "B").build(), threadPool), threadPool).start();
+ serviceB = new TransportService(settingsBuilder().put("name", "B").build(), new NettyTransport(settingsBuilder().put("name", "B").build(), threadPool), threadPool, timerService).start();
serviceBNode = new DiscoveryNode("B", serviceB.boundAddress().publishAddress());
}
-
}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyClient.java b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyClient.java
index c877e16e0ba4e..d55e0a7bb6389 100644
--- a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyClient.java
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyClient.java
@@ -22,6 +22,7 @@
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.cached.CachedThreadPool;
+import org.elasticsearch.timer.TimerService;
import org.elasticsearch.transport.BaseTransportResponseHandler;
import org.elasticsearch.transport.RemoteTransportException;
import org.elasticsearch.transport.TransportService;
@@ -56,8 +57,9 @@ public static void main(String[] args) {
.put("transport.netty.connectionsPerNode", 5)
.build();
- final ThreadPool threadPool = new CachedThreadPool();
- final TransportService transportService = new TransportService(new NettyTransport(settings, threadPool), threadPool).start();
+ final ThreadPool threadPool = new CachedThreadPool(settings);
+ final TimerService timerService = new TimerService(settings, threadPool);
+ final TransportService transportService = new TransportService(new NettyTransport(settings, threadPool), threadPool, timerService).start();
final DiscoveryNode node = new DiscoveryNode("server", new InetSocketTransportAddress("localhost", 9999));
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyServer.java b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyServer.java
index e2e494d42f0d8..8faea99342c5a 100644
--- a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyServer.java
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyServer.java
@@ -21,6 +21,7 @@
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.cached.CachedThreadPool;
+import org.elasticsearch.timer.TimerService;
import org.elasticsearch.transport.BaseTransportRequestHandler;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportService;
@@ -40,8 +41,9 @@ public static void main(String[] args) {
.put("transport.netty.port", 9999)
.build();
- final ThreadPool threadPool = new CachedThreadPool();
- final TransportService transportService = new TransportService(new NettyTransport(settings, threadPool), threadPool).start();
+ final ThreadPool threadPool = new CachedThreadPool(settings);
+ final TimerService timerService = new TimerService(settings, threadPool);
+ final TransportService transportService = new TransportService(new NettyTransport(settings, threadPool), threadPool, timerService).start();
transportService.registerHandler("benchmark", new BaseTransportRequestHandler<BenchmarkMessage>() {
@Override public BenchmarkMessage newInstance() {
diff --git a/plugins/memcached/src/main/java/org/elasticsearch/memcached/netty/NettyMemcachedServerTransport.java b/plugins/memcached/src/main/java/org/elasticsearch/memcached/netty/NettyMemcachedServerTransport.java
index 29c97fe2a8d32..65473eeba375f 100644
--- a/plugins/memcached/src/main/java/org/elasticsearch/memcached/netty/NettyMemcachedServerTransport.java
+++ b/plugins/memcached/src/main/java/org/elasticsearch/memcached/netty/NettyMemcachedServerTransport.java
@@ -28,6 +28,7 @@
import org.elasticsearch.transport.netty.NettyInternalESLoggerFactory;
import org.elasticsearch.util.SizeValue;
import org.elasticsearch.util.component.AbstractLifecycleComponent;
+import org.elasticsearch.util.io.NetworkUtils;
import org.elasticsearch.util.settings.Settings;
import org.elasticsearch.util.transport.BoundTransportAddress;
import org.elasticsearch.util.transport.InetSocketTransportAddress;
@@ -48,7 +49,7 @@
import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.util.concurrent.DynamicExecutors.*;
-import static org.elasticsearch.util.io.HostResolver.*;
+import static org.elasticsearch.util.io.NetworkUtils.*;
/**
* @author kimchy (shay.banon)
@@ -101,7 +102,7 @@ public class NettyMemcachedServerTransport extends AbstractLifecycleComponent<Me
this.publishHost = componentSettings.get("publish_host");
this.tcpNoDelay = componentSettings.getAsBoolean("tcp_no_delay", true);
this.tcpKeepAlive = componentSettings.getAsBoolean("tcp_keep_alive", null);
- this.reuseAddress = componentSettings.getAsBoolean("reuse_address", null);
+ this.reuseAddress = componentSettings.getAsBoolean("reuse_address", NetworkUtils.defaultReuseAddress());
this.tcpSendBufferSize = componentSettings.getAsSize("tcp_send_buffer_size", null);
this.tcpReceiveBufferSize = componentSettings.getAsSize("tcp_receive_buffer_size", null);
}
@@ -176,17 +177,7 @@ public class NettyMemcachedServerTransport extends AbstractLifecycleComponent<Me
InetSocketAddress boundAddress = (InetSocketAddress) serverChannel.getLocalAddress();
InetSocketAddress publishAddress;
try {
- InetAddress publishAddressX = resolvePublishHostAddress(publishHost, settings);
- if (publishAddressX == null) {
- // if its 0.0.0.0, we can't publish that.., default to the local ip address
- if (boundAddress.getAddress().isAnyLocalAddress()) {
- publishAddress = new InetSocketAddress(resolvePublishHostAddress(publishHost, settings, LOCAL_IP), boundAddress.getPort());
- } else {
- publishAddress = boundAddress;
- }
- } else {
- publishAddress = new InetSocketAddress(publishAddressX, boundAddress.getPort());
- }
+ publishAddress = new InetSocketAddress(resolvePublishHostAddress(publishHost, settings), boundAddress.getPort());
} catch (Exception e) {
throw new BindTransportException("Failed to resolve publish address", e);
}
|
0a545cb738de474fb6dd20bdd8e28e939ab62dae
|
camel
|
CAMEL-2919: Debugger API--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@961615 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/impl/BreakpointSupport.java b/camel-core/src/main/java/org/apache/camel/impl/BreakpointSupport.java
index c87280b7bfe64..dace795250a2e 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/BreakpointSupport.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/BreakpointSupport.java
@@ -16,13 +16,17 @@
*/
package org.apache.camel.impl;
+import java.util.EventObject;
+
import org.apache.camel.Exchange;
+import org.apache.camel.Processor;
+import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.spi.Breakpoint;
/**
* A support class for {@link Breakpoint} implementations to use as base class.
* <p/>
- * Will be in active state and match any {@link Exchange}s.
+ * Will be in active state.
*
* @version $Revision$
*/
@@ -42,4 +46,15 @@ public void activate() {
state = State.Active;
}
+ public void beforeProcess(Exchange exchange, Processor processor, ProcessorDefinition definition) {
+ // noop
+ }
+
+ public void afterProcess(Exchange exchange, Processor processor, ProcessorDefinition definition) {
+ // noop
+ }
+
+ public void onEvent(Exchange exchange, EventObject event, ProcessorDefinition definition) {
+ // noop
+ }
}
diff --git a/camel-core/src/main/java/org/apache/camel/impl/ConditionSupport.java b/camel-core/src/main/java/org/apache/camel/impl/ConditionSupport.java
new file mode 100644
index 0000000000000..538f122c1bd38
--- /dev/null
+++ b/camel-core/src/main/java/org/apache/camel/impl/ConditionSupport.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.impl;
+
+import java.util.EventObject;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.Processor;
+import org.apache.camel.model.ProcessorDefinition;
+import org.apache.camel.spi.Condition;
+
+/**
+ * A support class for {@link org.apache.camel.spi.Condition} implementations to use as base class.
+ *
+ * @version $Revision$
+ */
+public class ConditionSupport implements Condition {
+
+ public boolean matchProcess(Exchange exchange, Processor processor, ProcessorDefinition definition) {
+ return false;
+ }
+
+ public boolean matchEvent(Exchange exchange, EventObject event) {
+ return false;
+ }
+}
diff --git a/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java b/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
index 42662acaf56f8..086d43f34524e 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
@@ -1023,8 +1023,12 @@ private void doStartCamel() throws Exception {
}
}
+ // register debugger
if (getDebugger() != null) {
LOG.info("Debugger: " + getDebugger() + " is enabled on CamelContext: " + getName());
+ // register this camel context on the debugger
+ getDebugger().setCamelContext(this);
+ startServices(getDebugger());
addInterceptStrategy(new Debug(getDebugger()));
}
@@ -1073,6 +1077,7 @@ private void doStartCamel() throws Exception {
routeDefinitionInitiated = true;
}
+
// starting will continue in the start method
}
diff --git a/camel-core/src/main/java/org/apache/camel/impl/DefaultDebugger.java b/camel-core/src/main/java/org/apache/camel/impl/DefaultDebugger.java
index 38dec875a0c87..acf6f481f7d88 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/DefaultDebugger.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/DefaultDebugger.java
@@ -19,14 +19,23 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
+import java.util.EventObject;
import java.util.List;
+import org.apache.camel.CamelContext;
+import org.apache.camel.CamelContextAware;
import org.apache.camel.Exchange;
+import org.apache.camel.LoggingLevel;
import org.apache.camel.Processor;
+import org.apache.camel.RouteNode;
+import org.apache.camel.management.EventNotifierSupport;
+import org.apache.camel.management.event.AbstractExchangeEvent;
import org.apache.camel.model.ProcessorDefinition;
+import org.apache.camel.processor.interceptor.Tracer;
import org.apache.camel.spi.Breakpoint;
import org.apache.camel.spi.Condition;
import org.apache.camel.spi.Debugger;
+import org.apache.camel.util.ObjectHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -35,10 +44,11 @@
*
* @version $Revision$
*/
-public class DefaultDebugger implements Debugger {
+public class DefaultDebugger implements Debugger, CamelContextAware {
private static final Log LOG = LogFactory.getLog(DefaultDebugger.class);
private final List<BreakpointConditions> breakpoints = new ArrayList<BreakpointConditions>();
+ private CamelContext camelContext;
/**
* Holder class for breakpoint and the associated conditions
@@ -65,6 +75,21 @@ public List<Condition> getConditions() {
}
}
+ public DefaultDebugger() {
+ }
+
+ public DefaultDebugger(CamelContext camelContext) {
+ this.camelContext = camelContext;
+ }
+
+ public CamelContext getCamelContext() {
+ return camelContext;
+ }
+
+ public void setCamelContext(CamelContext camelContext) {
+ this.camelContext = camelContext;
+ }
+
public void addBreakpoint(Breakpoint breakpoint) {
breakpoints.add(new BreakpointConditions(breakpoint));
}
@@ -97,7 +122,24 @@ public List<Breakpoint> getBreakpoints() {
return Collections.unmodifiableList(answer);
}
- public boolean onExchange(Exchange exchange, Processor processor, ProcessorDefinition definition) {
+ public boolean beforeProcess(Exchange exchange, Processor processor, ProcessorDefinition definition) {
+ boolean match = false;
+
+ // does any of the breakpoints apply?
+ for (BreakpointConditions breakpoint : breakpoints) {
+ // breakpoint must be active
+ if (Breakpoint.State.Active.equals(breakpoint.getBreakpoint().getState())) {
+ if (matchConditions(exchange, processor, definition, breakpoint)) {
+ match = true;
+ onBeforeProcess(exchange, processor, definition, breakpoint.getBreakpoint());
+ }
+ }
+ }
+
+ return match;
+ }
+
+ public boolean afterProcess(Exchange exchange, Processor processor, ProcessorDefinition definition) {
boolean match = false;
// does any of the breakpoints apply?
@@ -106,7 +148,7 @@ public boolean onExchange(Exchange exchange, Processor processor, ProcessorDefin
if (Breakpoint.State.Active.equals(breakpoint.getBreakpoint().getState())) {
if (matchConditions(exchange, processor, definition, breakpoint)) {
match = true;
- onBreakpoint(exchange, processor, definition, breakpoint.getBreakpoint());
+ onAfterProcess(exchange, processor, definition, breakpoint.getBreakpoint());
}
}
}
@@ -114,10 +156,61 @@ public boolean onExchange(Exchange exchange, Processor processor, ProcessorDefin
return match;
}
- private boolean matchConditions(Exchange exchange, Processor processor, ProcessorDefinition definition, BreakpointConditions breakpoint) {
+ public boolean onEvent(Exchange exchange, EventObject event) {
+ boolean match = false;
+
+ // does any of the breakpoints apply?
+ for (BreakpointConditions breakpoint : breakpoints) {
+ // breakpoint must be active
+ if (Breakpoint.State.Active.equals(breakpoint.getBreakpoint().getState())) {
+ if (matchConditions(exchange, event, breakpoint)) {
+ match = true;
+ onEvent(exchange, event, breakpoint.getBreakpoint());
+ }
+ }
+ }
+
+ return match;
+ }
+
+ protected void onBeforeProcess(Exchange exchange, Processor processor, ProcessorDefinition definition, Breakpoint breakpoint) {
+ try {
+ breakpoint.beforeProcess(exchange, processor, definition);
+ } catch (Throwable e) {
+ LOG.warn("Exception occurred in breakpoint: " + breakpoint + ". This exception will be ignored.", e);
+ }
+ }
+
+ protected void onAfterProcess(Exchange exchange, Processor processor, ProcessorDefinition definition, Breakpoint breakpoint) {
+ try {
+ breakpoint.afterProcess(exchange, processor, definition);
+ } catch (Throwable e) {
+ LOG.warn("Exception occurred in breakpoint: " + breakpoint + ". This exception will be ignored.", e);
+ }
+ }
+
+ protected void onEvent(Exchange exchange, EventObject event, Breakpoint breakpoint) {
+ ProcessorDefinition definition = null;
+
+ // try to get the last known definition
+ if (exchange.getUnitOfWork() != null && exchange.getUnitOfWork().getTracedRouteNodes() != null) {
+ RouteNode node = exchange.getUnitOfWork().getTracedRouteNodes().getLastNode();
+ if (node != null) {
+ definition = node.getProcessorDefinition();
+ }
+ }
+
+ try {
+ breakpoint.onEvent(exchange, event, definition);
+ } catch (Throwable e) {
+ LOG.warn("Exception occurred in breakpoint: " + breakpoint + ". This exception will be ignored.", e);
+ }
+ }
+
+ private boolean matchConditions(Exchange exchange, Processor processor, ProcessorDefinition definition, BreakpointConditions breakpoint) {
if (breakpoint.getConditions() != null && !breakpoint.getConditions().isEmpty()) {
for (Condition condition : breakpoint.getConditions()) {
- if (!condition.match(exchange, definition)) {
+ if (!condition.matchProcess(exchange, processor, definition)) {
return false;
}
}
@@ -126,21 +219,66 @@ private boolean matchConditions(Exchange exchange, Processor processor, Process
return true;
}
- protected void onBreakpoint(Exchange exchange, Processor processor, ProcessorDefinition definition, Breakpoint breakpoint) {
- breakpoint.onExchange(exchange, processor, definition);
+ private boolean matchConditions(Exchange exchange, EventObject event, BreakpointConditions breakpoint) {
+ if (breakpoint.getConditions() != null && !breakpoint.getConditions().isEmpty()) {
+ for (Condition condition : breakpoint.getConditions()) {
+ if (!condition.matchEvent(exchange, event)) {
+ return false;
+ }
+ }
+ }
+
+ return true;
}
public void start() throws Exception {
- // noop
+ ObjectHelper.notNull(camelContext, "CamelContext", this);
+ // register our event notifier
+ camelContext.getManagementStrategy().addEventNotifier(new DebugEventNotifier());
+ Tracer tracer = Tracer.getTracer(camelContext);
+ if (tracer == null) {
+ // tracer is disabled so enable it silently so we can leverage it to trace the Exchanges for us
+ tracer = Tracer.createTracer(camelContext);
+ tracer.setLogLevel(LoggingLevel.OFF);
+ camelContext.addService(tracer);
+ camelContext.addInterceptStrategy(tracer);
+ }
}
public void stop() throws Exception {
breakpoints.clear();
- // noop
}
@Override
public String toString() {
return "DefaultDebugger";
}
+
+ private final class DebugEventNotifier extends EventNotifierSupport {
+
+ private DebugEventNotifier() {
+ setIgnoreCamelContextEvents(true);
+ setIgnoreServiceEvents(true);
+ }
+
+ public void notify(EventObject event) throws Exception {
+ AbstractExchangeEvent aee = (AbstractExchangeEvent) event;
+ Exchange exchange = aee.getExchange();
+ onEvent(exchange, event);
+ }
+
+ public boolean isEnabled(EventObject event) {
+ return event instanceof AbstractExchangeEvent;
+ }
+
+ @Override
+ protected void doStart() throws Exception {
+ // noop
+ }
+
+ @Override
+ protected void doStop() throws Exception {
+ // noop
+ }
+ }
}
diff --git a/camel-core/src/main/java/org/apache/camel/management/event/AbstractExchangeEvent.java b/camel-core/src/main/java/org/apache/camel/management/event/AbstractExchangeEvent.java
new file mode 100644
index 0000000000000..0a3f2694392aa
--- /dev/null
+++ b/camel-core/src/main/java/org/apache/camel/management/event/AbstractExchangeEvent.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.management.event;
+
+import java.util.EventObject;
+
+import org.apache.camel.Exchange;
+
+/**
+ * Base class for {@link Exchange} events.
+ *
+ * @version $Revision$
+ */
+public abstract class AbstractExchangeEvent extends EventObject {
+
+ private final Exchange exchange;
+
+ public AbstractExchangeEvent(Exchange source) {
+ super(source);
+ this.exchange = source;
+ }
+
+ public Exchange getExchange() {
+ return exchange;
+ }
+}
diff --git a/camel-core/src/main/java/org/apache/camel/management/event/ExchangeCompletedEvent.java b/camel-core/src/main/java/org/apache/camel/management/event/ExchangeCompletedEvent.java
index e460e609a188d..4b89267d7c5e7 100644
--- a/camel-core/src/main/java/org/apache/camel/management/event/ExchangeCompletedEvent.java
+++ b/camel-core/src/main/java/org/apache/camel/management/event/ExchangeCompletedEvent.java
@@ -16,29 +16,20 @@
*/
package org.apache.camel.management.event;
-import java.util.EventObject;
-
import org.apache.camel.Exchange;
/**
* @version $Revision$
*/
-public class ExchangeCompletedEvent extends EventObject {
+public class ExchangeCompletedEvent extends AbstractExchangeEvent {
private static final long serialVersionUID = -3231801412021356098L;
- private final Exchange exchange;
-
public ExchangeCompletedEvent(Exchange source) {
super(source);
- this.exchange = source;
- }
-
- public Exchange getExchange() {
- return exchange;
}
@Override
public String toString() {
- return exchange.getExchangeId() + " exchange completed: " + exchange;
+ return getExchange().getExchangeId() + " exchange completed: " + getExchange();
}
}
diff --git a/camel-core/src/main/java/org/apache/camel/management/event/ExchangeCreatedEvent.java b/camel-core/src/main/java/org/apache/camel/management/event/ExchangeCreatedEvent.java
index 1cf6a50d5dfbc..658d9ba3079aa 100644
--- a/camel-core/src/main/java/org/apache/camel/management/event/ExchangeCreatedEvent.java
+++ b/camel-core/src/main/java/org/apache/camel/management/event/ExchangeCreatedEvent.java
@@ -16,29 +16,20 @@
*/
package org.apache.camel.management.event;
-import java.util.EventObject;
-
import org.apache.camel.Exchange;
/**
* @version $Revision$
*/
-public class ExchangeCreatedEvent extends EventObject {
+public class ExchangeCreatedEvent extends AbstractExchangeEvent {
private static final long serialVersionUID = -19248832613958243L;
- private final Exchange exchange;
-
public ExchangeCreatedEvent(Exchange source) {
super(source);
- this.exchange = source;
- }
-
- public Exchange getExchange() {
- return exchange;
}
@Override
public String toString() {
- return exchange.getExchangeId() + " exchange created: " + exchange;
+ return getExchange().getExchangeId() + " exchange created: " + getExchange();
}
}
diff --git a/camel-core/src/main/java/org/apache/camel/management/event/ExchangeFailureEvent.java b/camel-core/src/main/java/org/apache/camel/management/event/ExchangeFailureEvent.java
index 15ff4d57b0d30..85e537b3d45d8 100644
--- a/camel-core/src/main/java/org/apache/camel/management/event/ExchangeFailureEvent.java
+++ b/camel-core/src/main/java/org/apache/camel/management/event/ExchangeFailureEvent.java
@@ -16,34 +16,25 @@
*/
package org.apache.camel.management.event;
-import java.util.EventObject;
-
import org.apache.camel.Exchange;
/**
* @version $Revision$
*/
-public class ExchangeFailureEvent extends EventObject {
+public class ExchangeFailureEvent extends AbstractExchangeEvent {
private static final long serialVersionUID = -8484326904627268101L;
- private final Exchange exchange;
-
public ExchangeFailureEvent(Exchange source) {
super(source);
- this.exchange = source;
- }
-
- public Exchange getExchange() {
- return exchange;
}
@Override
public String toString() {
- Exception cause = exchange.getException();
+ Exception cause = getExchange().getException();
if (cause != null) {
- return exchange.getExchangeId() + " exchange failure: " + exchange + " cause " + cause;
+ return getExchange().getExchangeId() + " exchange failure: " + getExchange() + " cause " + cause;
} else {
- return exchange.getExchangeId() + " exchange failure: " + exchange;
+ return getExchange().getExchangeId() + " exchange failure: " + getExchange();
}
}
}
diff --git a/camel-core/src/main/java/org/apache/camel/management/event/ExchangeFailureHandledEvent.java b/camel-core/src/main/java/org/apache/camel/management/event/ExchangeFailureHandledEvent.java
index 740e3b0890b15..c711e06363f51 100644
--- a/camel-core/src/main/java/org/apache/camel/management/event/ExchangeFailureHandledEvent.java
+++ b/camel-core/src/main/java/org/apache/camel/management/event/ExchangeFailureHandledEvent.java
@@ -16,32 +16,24 @@
*/
package org.apache.camel.management.event;
-import java.util.EventObject;
-
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
/**
* @version $Revision$
*/
-public class ExchangeFailureHandledEvent extends EventObject {
+public class ExchangeFailureHandledEvent extends AbstractExchangeEvent {
private static final long serialVersionUID = -7554809462006009547L;
- private final Exchange exchange;
private final Processor failureHandler;
private final boolean deadLetterChannel;
private final boolean handled;
public ExchangeFailureHandledEvent(Exchange source, Processor failureHandler, boolean deadLetterChannel) {
super(source);
- this.exchange = source;
this.failureHandler = failureHandler;
this.deadLetterChannel = deadLetterChannel;
- this.handled = exchange.getProperty(Exchange.ERRORHANDLER_HANDLED, false, Boolean.class);
- }
-
- public Exchange getExchange() {
- return exchange;
+ this.handled = source.getProperty(Exchange.ERRORHANDLER_HANDLED, false, Boolean.class);
}
public Processor getFailureHandler() {
@@ -59,9 +51,9 @@ public boolean isHandled() {
@Override
public String toString() {
if (isDeadLetterChannel()) {
- return exchange.getExchangeId() + " exchange failed: " + exchange + " but was handled by dead letter channel: " + failureHandler;
+ return getExchange().getExchangeId() + " exchange failed: " + getExchange() + " but was handled by dead letter channel: " + failureHandler;
} else {
- return exchange.getExchangeId() + " exchange failed: " + exchange + " but was processed by: " + failureHandler;
+ return getExchange().getExchangeId() + " exchange failed: " + getExchange() + " but was processed by: " + failureHandler;
}
}
}
diff --git a/camel-core/src/main/java/org/apache/camel/management/event/ExchangeSentEvent.java b/camel-core/src/main/java/org/apache/camel/management/event/ExchangeSentEvent.java
index bbe6f354e5933..d264bc82c43db 100644
--- a/camel-core/src/main/java/org/apache/camel/management/event/ExchangeSentEvent.java
+++ b/camel-core/src/main/java/org/apache/camel/management/event/ExchangeSentEvent.java
@@ -16,32 +16,24 @@
*/
package org.apache.camel.management.event;
-import java.util.EventObject;
-
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
/**
* @version $Revision$
*/
-public class ExchangeSentEvent extends EventObject {
+public class ExchangeSentEvent extends AbstractExchangeEvent {
private static final long serialVersionUID = -19248832613958123L;
- private final Exchange exchange;
private final Endpoint endpoint;
private final long timeTaken;
public ExchangeSentEvent(Exchange source, Endpoint endpoint, long timeTaken) {
super(source);
- this.exchange = source;
this.endpoint = endpoint;
this.timeTaken = timeTaken;
}
- public Exchange getExchange() {
- return exchange;
- }
-
public Endpoint getEndpoint() {
return endpoint;
}
@@ -52,7 +44,7 @@ public long getTimeTaken() {
@Override
public String toString() {
- return exchange.getExchangeId() + " exchange " + exchange + " sent to: " + endpoint.getEndpointUri() + " took: " + timeTaken + " ms.";
+ return getExchange().getExchangeId() + " exchange " + getExchange() + " sent to: " + endpoint.getEndpointUri() + " took: " + timeTaken + " ms.";
}
}
\ No newline at end of file
diff --git a/camel-core/src/main/java/org/apache/camel/processor/interceptor/Debug.java b/camel-core/src/main/java/org/apache/camel/processor/interceptor/Debug.java
index 5f6ddfa1fd180..1733eed3caaee 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/interceptor/Debug.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/interceptor/Debug.java
@@ -26,6 +26,8 @@
import org.apache.camel.spi.InterceptStrategy;
/**
+ * A debug interceptor to notify {@link Debugger} with {@link Exchange}s being processed.
+ *
* @version $Revision$
*/
public class Debug implements InterceptStrategy {
@@ -40,9 +42,16 @@ public Processor wrapProcessorInInterceptors(final CamelContext context, final P
final Processor target, final Processor nextTarget) throws Exception {
return new DelegateAsyncProcessor(target) {
@Override
- public boolean process(Exchange exchange, AsyncCallback callback) {
- debugger.onExchange(exchange, target, definition);
- return super.process(exchange, callback);
+ public boolean process(final Exchange exchange, final AsyncCallback callback) {
+ debugger.beforeProcess(exchange, target, definition);
+
+ return super.process(exchange, new AsyncCallback() {
+ public void done(boolean doneSync) {
+ debugger.afterProcess(exchange, processor, definition);
+ // must notify original callback
+ callback.done(doneSync);
+ }
+ });
}
@Override
diff --git a/camel-core/src/main/java/org/apache/camel/spi/Breakpoint.java b/camel-core/src/main/java/org/apache/camel/spi/Breakpoint.java
index d4f1d98d33a68..8153955073039 100644
--- a/camel-core/src/main/java/org/apache/camel/spi/Breakpoint.java
+++ b/camel-core/src/main/java/org/apache/camel/spi/Breakpoint.java
@@ -16,6 +16,8 @@
*/
package org.apache.camel.spi;
+import java.util.EventObject;
+
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.model.ProcessorDefinition;
@@ -26,20 +28,19 @@
* This allows you to register {@link org.apache.camel.spi.Breakpoint}s to the {@link org.apache.camel.spi.Debugger}
* and have those breakpoints activated when their {@link org.apache.camel.spi.Condition}s match.
* <p/>
- * If any exceptions is thrown from the {@link #onExchange(org.apache.camel.Exchange, org.apache.camel.Processor, org.apache.camel.model.ProcessorDefinition)}
- * method then the {@link org.apache.camel.spi.Debugger} will catch and log those at <tt>WARN</tt> level and continue.
+ * If any exceptions is thrown from the callback methods then the {@link org.apache.camel.spi.Debugger}
+ * will catch and log those at <tt>WARN</tt> level and continue. This ensures Camel can continue to route
+ * the message without having breakpoints causing issues.
*
+ * @version $Revision$
* @see org.apache.camel.spi.Debugger
* @see org.apache.camel.spi.Condition
- * @version $Revision$
*/
public interface Breakpoint {
- // TODO: Hook into the EventNotifier so we can have breakpoints trigger on those conditions as well
- // exceptions, create, done, etc. and a FollowMe condition to follow a single exchange
- // while others are being routed so you can follow one only, eg need an API on Debugger for that
-
- enum State { Active, Suspended }
+ enum State {
+ Active, Suspended
+ }
/**
* Gets the state of this break
@@ -59,12 +60,32 @@ enum State { Active, Suspended }
void activate();
/**
- * Callback invoked when the breakpoint was hit.
+ * Callback invoked when the breakpoint was hit and the {@link Exchange} is about to be processed (before).
+ *
+ * @param exchange the {@link Exchange}
+ * @param processor the {@link Processor} about to be processed
+ * @param definition the {@link org.apache.camel.model.ProcessorDefinition} definition of the processor
+ */
+ void beforeProcess(Exchange exchange, Processor processor, ProcessorDefinition definition);
+
+ /**
+ * Callback invoked when the breakpoint was hit and the {@link Exchange} has been processed (after).
+ *
+ * @param exchange the {@link Exchange}
+ * @param processor the {@link Processor} which was processed
+ * @param definition the {@link org.apache.camel.model.ProcessorDefinition} definition of the processor
+ */
+ void afterProcess(Exchange exchange, Processor processor, ProcessorDefinition definition);
+
+ /**
+ * Callback invoked when the breakpoint was hit and any of the {@link Exchange} {@link EventObject event}s occurred.
*
- * @param exchange the {@link Exchange}
- * @param processor the {@link Processor} which is the next target
- * @param definition the {@link org.apache.camel.model.ProcessorDefinition} definition of the processor
+ * @param exchange the {@link Exchange}
+ * @param event the event (instance of {@link org.apache.camel.management.event.AbstractExchangeEvent}
+ * @param definition the {@link org.apache.camel.model.ProcessorDefinition} definition of the last processor executed,
+ * may be <tt>null</tt> if not possible to resolve from tracing
+ * @see org.apache.camel.management.event.AbstractExchangeEvent
*/
- void onExchange(Exchange exchange, Processor processor, ProcessorDefinition definition);
+ void onEvent(Exchange exchange, EventObject event, ProcessorDefinition definition);
}
diff --git a/camel-core/src/main/java/org/apache/camel/spi/Condition.java b/camel-core/src/main/java/org/apache/camel/spi/Condition.java
index 9773a05e8f378..e5ccd6617e835 100644
--- a/camel-core/src/main/java/org/apache/camel/spi/Condition.java
+++ b/camel-core/src/main/java/org/apache/camel/spi/Condition.java
@@ -16,7 +16,10 @@
*/
package org.apache.camel.spi;
+import java.util.EventObject;
+
import org.apache.camel.Exchange;
+import org.apache.camel.Processor;
import org.apache.camel.model.ProcessorDefinition;
/**
@@ -33,9 +36,20 @@ public interface Condition {
* Does the condition match
*
* @param exchange the exchange
- * @param definition the current node in the route where the Exchange is at
+ * @param processor the {@link Processor}
+ * @param definition the present location in the route where the {@link Exchange} is located at
+ * @return <tt>true</tt> to match, <tt>false</tt> otherwise
+ */
+ boolean matchProcess(Exchange exchange, Processor processor, ProcessorDefinition definition);
+
+ /**
+ * Does the condition match
+ *
+ * @param exchange the exchange
+ * @param event the event (instance of {@link org.apache.camel.management.event.AbstractExchangeEvent}
* @return <tt>true</tt> to match, <tt>false</tt> otherwise
+ * @see org.apache.camel.management.event.AbstractExchangeEvent
*/
- boolean match(Exchange exchange, ProcessorDefinition definition);
+ boolean matchEvent(Exchange exchange, EventObject event);
}
diff --git a/camel-core/src/main/java/org/apache/camel/spi/Debugger.java b/camel-core/src/main/java/org/apache/camel/spi/Debugger.java
index f2c6a78f91807..13823e54e8a9e 100644
--- a/camel-core/src/main/java/org/apache/camel/spi/Debugger.java
+++ b/camel-core/src/main/java/org/apache/camel/spi/Debugger.java
@@ -16,8 +16,10 @@
*/
package org.apache.camel.spi;
+import java.util.EventObject;
import java.util.List;
+import org.apache.camel.CamelContextAware;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.Service;
@@ -29,7 +31,7 @@
*
* @version $Revision$
*/
-public interface Debugger extends Service {
+public interface Debugger extends Service, CamelContextAware {
/**
* Add the given breakpoint
@@ -70,15 +72,36 @@ public interface Debugger extends Service {
*/
List<Breakpoint> getBreakpoints();
+ /**
+ * Callback invoked when an {@link Exchange} is about to be processed which allows implementators
+ * to notify breakpoints.
+ *
+ * @param exchange the exchange
+ * @param processor the {@link Processor} about to be processed
+ * @param definition the definition of the processor
+ * @return <tt>true</tt> if any breakpoint was hit, <tt>false</tt> if not breakpoint was hit
+ */
+ boolean beforeProcess(Exchange exchange, Processor processor, ProcessorDefinition definition);
+
+ /**
+ * Callback invoked when an {@link Exchange} has been processed which allows implementators
+ * to notify breakpoints.
+ *
+ * @param exchange the exchange
+ * @param processor the {@link Processor} which was processed
+ * @param definition the definition of the processor
+ * @return <tt>true</tt> if any breakpoint was hit, <tt>false</tt> if not breakpoint was hit
+ */
+ boolean afterProcess(Exchange exchange, Processor processor, ProcessorDefinition definition);
+
/**
* Callback invoked when an {@link Exchange} is being processed which allows implementators
* to notify breakpoints.
*
- * @param exchange the exchange
- * @param processor the target processor (to be processed next)
- * @param definition the definition of the processor
+ * @param exchange the exchange
+ * @param event the event (instance of {@link org.apache.camel.management.event.AbstractExchangeEvent}
* @return <tt>true</tt> if any breakpoint was hit, <tt>false</tt> if not breakpoint was hit
*/
- boolean onExchange(Exchange exchange, Processor processor, ProcessorDefinition definition);
+ boolean onEvent(Exchange exchange, EventObject event);
}
diff --git a/camel-core/src/test/java/org/apache/camel/processor/interceptor/DebugExceptionBreakpointTest.java b/camel-core/src/test/java/org/apache/camel/processor/interceptor/DebugExceptionBreakpointTest.java
new file mode 100644
index 0000000000000..96a698258ce16
--- /dev/null
+++ b/camel-core/src/test/java/org/apache/camel/processor/interceptor/DebugExceptionBreakpointTest.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.processor.interceptor;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.camel.ContextTestSupport;
+import org.apache.camel.Exchange;
+import org.apache.camel.Processor;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.impl.BreakpointSupport;
+import org.apache.camel.impl.ConditionSupport;
+import org.apache.camel.impl.DefaultDebugger;
+import org.apache.camel.model.ProcessorDefinition;
+import org.apache.camel.spi.Breakpoint;
+import org.apache.camel.spi.Condition;
+
+/**
+ * @version $Revision$
+ */
+public class DebugExceptionBreakpointTest extends ContextTestSupport {
+
+ private List<String> logs = new ArrayList<String>();
+ private Condition exceptionCondition;
+ private Breakpoint breakpoint;
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+
+ breakpoint = new BreakpointSupport() {
+ @Override
+ public void afterProcess(Exchange exchange, Processor processor, ProcessorDefinition definition) {
+ Exception e = exchange.getException();
+ logs.add("Breakpoint at " + definition.getShortName() + " caused by: " + e.getClass().getSimpleName() + "[" + e.getMessage() + "]");
+ }
+ };
+
+ exceptionCondition = new ConditionSupport() {
+ @Override
+ public boolean matchProcess(Exchange exchange, Processor processor, ProcessorDefinition definition) {
+ return exchange.getException() != null;
+ }
+ };
+ }
+
+ public void testDebug() throws Exception {
+ context.getDebugger().addBreakpoint(breakpoint, exceptionCondition);
+
+ getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
+
+ template.sendBody("direct:start", "Hello World");
+ try {
+ template.sendBody("direct:start", "Hello Camel");
+ fail("Should have thrown exception");
+ } catch (Exception e) {
+ // ignore
+ }
+
+ assertMockEndpointsSatisfied();
+
+ assertEquals(2, logs.size());
+ assertEquals("Breakpoint at when caused by: IllegalArgumentException[Damn]", logs.get(0));
+ assertEquals("Breakpoint at choice caused by: IllegalArgumentException[Damn]", logs.get(1));
+ }
+
+ @Override
+ protected RouteBuilder createRouteBuilder() throws Exception {
+ return new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ // use debugger
+ context.setDebugger(new DefaultDebugger());
+
+ from("direct:start")
+ .to("log:foo")
+ .choice()
+ .when(body().contains("Camel")).throwException(new IllegalArgumentException("Damn"))
+ .end()
+ .to("mock:result");
+ }
+ };
+ }
+
+}
\ No newline at end of file
diff --git a/camel-core/src/test/java/org/apache/camel/processor/interceptor/DebugExceptionEventBreakpointTest.java b/camel-core/src/test/java/org/apache/camel/processor/interceptor/DebugExceptionEventBreakpointTest.java
new file mode 100644
index 0000000000000..2a7c6fc5b9ee8
--- /dev/null
+++ b/camel-core/src/test/java/org/apache/camel/processor/interceptor/DebugExceptionEventBreakpointTest.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.processor.interceptor;
+
+import java.util.ArrayList;
+import java.util.EventObject;
+import java.util.List;
+
+import org.apache.camel.ContextTestSupport;
+import org.apache.camel.Exchange;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.impl.BreakpointSupport;
+import org.apache.camel.impl.ConditionSupport;
+import org.apache.camel.impl.DefaultDebugger;
+import org.apache.camel.management.event.AbstractExchangeEvent;
+import org.apache.camel.management.event.ExchangeFailureEvent;
+import org.apache.camel.model.ProcessorDefinition;
+import org.apache.camel.spi.Breakpoint;
+import org.apache.camel.spi.Condition;
+
+/**
+ * @version $Revision$
+ */
+public class DebugExceptionEventBreakpointTest extends ContextTestSupport {
+
+ private List<String> logs = new ArrayList<String>();
+ private Condition exceptionCondition;
+ private Breakpoint breakpoint;
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+
+ breakpoint = new BreakpointSupport() {
+ public void onEvent(Exchange exchange, EventObject event, ProcessorDefinition definition) {
+ AbstractExchangeEvent aee = (AbstractExchangeEvent) event;
+ Exception e = aee.getExchange().getException();
+ logs.add("Breakpoint at " + definition + " caused by: " + e.getClass().getSimpleName() + "[" + e.getMessage() + "]");
+ }
+ };
+
+ exceptionCondition = new ConditionSupport() {
+ public boolean matchEvent(Exchange exchange, EventObject event) {
+ return event instanceof ExchangeFailureEvent;
+ }
+ };
+ }
+
+ public void testDebug() throws Exception {
+ context.getDebugger().addBreakpoint(breakpoint, exceptionCondition);
+
+ getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
+
+ template.sendBody("direct:start", "Hello World");
+ try {
+ template.sendBody("direct:start", "Hello Camel");
+ fail("Should have thrown exception");
+ } catch (Exception e) {
+ // ignore
+ }
+
+ assertMockEndpointsSatisfied();
+
+ assertEquals(1, logs.size());
+ assertEquals("Breakpoint at ThrowException[java.lang.IllegalArgumentException] caused by: IllegalArgumentException[Damn]", logs.get(0));
+ }
+
+ @Override
+ protected RouteBuilder createRouteBuilder() throws Exception {
+ return new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ // use debugger
+ context.setDebugger(new DefaultDebugger());
+
+ from("direct:start")
+ .to("log:foo")
+ .choice()
+ .when(body().contains("Camel")).throwException(new IllegalArgumentException("Damn"))
+ .end()
+ .to("mock:result");
+ }
+ };
+ }
+
+}
\ No newline at end of file
diff --git a/camel-core/src/test/java/org/apache/camel/processor/interceptor/DebugTest.java b/camel-core/src/test/java/org/apache/camel/processor/interceptor/DebugTest.java
index 781d95d0add22..d35e381fe3939 100644
--- a/camel-core/src/test/java/org/apache/camel/processor/interceptor/DebugTest.java
+++ b/camel-core/src/test/java/org/apache/camel/processor/interceptor/DebugTest.java
@@ -17,6 +17,7 @@
package org.apache.camel.processor.interceptor;
import java.util.ArrayList;
+import java.util.EventObject;
import java.util.List;
import org.apache.camel.ContextTestSupport;
@@ -24,7 +25,9 @@
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.impl.BreakpointSupport;
+import org.apache.camel.impl.ConditionSupport;
import org.apache.camel.impl.DefaultDebugger;
+import org.apache.camel.management.event.ExchangeCompletedEvent;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.ToDefinition;
import org.apache.camel.spi.Breakpoint;
@@ -38,6 +41,7 @@ public class DebugTest extends ContextTestSupport {
private List<String> logs = new ArrayList<String>();
private Condition camelCondition;
private Condition mockCondition;
+ private Condition doneCondition;
private Breakpoint breakpoint;
@Override
@@ -45,20 +49,25 @@ protected void setUp() throws Exception {
super.setUp();
breakpoint = new BreakpointSupport() {
- public void onExchange(Exchange exchange, Processor processor, ProcessorDefinition definition) {
+ public void beforeProcess(Exchange exchange, Processor processor, ProcessorDefinition definition) {
String body = exchange.getIn().getBody(String.class);
logs.add("Breakpoint at " + definition + " with body: " + body);
}
+
+ public void onEvent(Exchange exchange, EventObject event, ProcessorDefinition definition) {
+ String body = exchange.getIn().getBody(String.class);
+ logs.add("Breakpoint event " + event.getClass().getSimpleName() + " with body: " + body);
+ }
};
- camelCondition = new Condition() {
- public boolean match(Exchange exchange, ProcessorDefinition definition) {
+ camelCondition = new ConditionSupport() {
+ public boolean matchProcess(Exchange exchange, Processor processor, ProcessorDefinition definition) {
return body().contains("Camel").matches(exchange);
}
};
- mockCondition = new Condition() {
- public boolean match(Exchange exchange, ProcessorDefinition definition) {
+ mockCondition = new ConditionSupport() {
+ public boolean matchProcess(Exchange exchange, Processor processor, ProcessorDefinition definition) {
// match when sending to mocks
if (definition instanceof ToDefinition) {
ToDefinition to = (ToDefinition) definition;
@@ -67,6 +76,13 @@ public boolean match(Exchange exchange, ProcessorDefinition definition) {
return false;
}
};
+
+ doneCondition = new ConditionSupport() {
+ @Override
+ public boolean matchEvent(Exchange exchange, EventObject event) {
+ return event instanceof ExchangeCompletedEvent;
+ }
+ };
}
public void testDebug() throws Exception {
@@ -84,6 +100,21 @@ public void testDebug() throws Exception {
assertEquals("Breakpoint at To[mock:result] with body: Hello Camel", logs.get(1));
}
+ public void testDebugEvent() throws Exception {
+ context.getDebugger().addBreakpoint(breakpoint, doneCondition);
+
+ getMockEndpoint("mock:result").expectedBodiesReceived("Hello World", "Hello Camel");
+
+ template.sendBody("direct:start", "Hello World");
+ template.sendBody("direct:start", "Hello Camel");
+
+ assertMockEndpointsSatisfied();
+
+ assertEquals(2, logs.size());
+ assertEquals("Breakpoint event ExchangeCompletedEvent with body: Hello World", logs.get(0));
+ assertEquals("Breakpoint event ExchangeCompletedEvent with body: Hello Camel", logs.get(1));
+ }
+
public void testDebugSuspended() throws Exception {
context.getDebugger().addBreakpoint(breakpoint, mockCondition, camelCondition);
|
5d29c390bf81bd1c5ec171751d6da9c0f862063c
|
drools
|
refactoring--git-svn-id: https://svn.jboss.org/repos/labs/trunk/labs/jbossrules@2301 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
p
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/leaps/RuleBaseImpl.java b/drools-core/src/main/java/org/drools/leaps/RuleBaseImpl.java
index 7be511bebf3..d4b69d473f5 100644
--- a/drools-core/src/main/java/org/drools/leaps/RuleBaseImpl.java
+++ b/drools-core/src/main/java/org/drools/leaps/RuleBaseImpl.java
@@ -1,6 +1,5 @@
package org.drools.leaps;
-import java.io.Serializable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@@ -9,141 +8,125 @@
import java.util.Set;
import java.util.WeakHashMap;
+import org.drools.FactException;
import org.drools.RuleBase;
+import org.drools.RuleIntegrationException;
+import org.drools.RuleSetIntegrationException;
import org.drools.WorkingMemory;
-import org.drools.leaps.conflict.DefaultConflictResolver;
-import org.drools.reteoo.DefaultFactHandleFactory;
-import org.drools.rule.DuplicateRuleNameException;
+import org.drools.spi.FactHandleFactory;
import org.drools.rule.InvalidPatternException;
-import org.drools.rule.InvalidRuleException;
import org.drools.rule.Rule;
import org.drools.rule.RuleSet;
-import org.drools.spi.FactHandleFactory;
+import org.drools.spi.ClassObjectTypeResolver;
+import org.drools.spi.ObjectTypeResolver;
import org.drools.spi.RuleBaseContext;
/**
* This base class for the engine and analogous to Drool's RuleBase class. It
* has a similar interface adapted to the Leaps algorithm
- *
+ *
* @author Alexander Bagerman
*
*/
-public class RuleBaseImpl implements RuleBase, Serializable {
- private static final long serialVersionUID = 0L;
-
- // to store rules added just as addRule rather than as a part of ruleSet
- private final static String defaultRuleSet = "___default___rule___set___";
-
- private HashMap ruleSets;
-
- private Map applicationData;
-
- private RuleBaseContext ruleBaseContext;
-
- private ConflictResolver conflictResolver;
-
- private Builder builder;
+public class RuleBaseImpl implements RuleBase {
private HashMap leapsRules = new HashMap();
+ private final Builder builder;
+
/**
* TODO we do not need it here. and it references RETEoo class
*
* The fact handle factory.
*/
+ /** The fact handle factory. */
private final FactHandleFactory factHandleFactory;
- /* @todo: replace this with a weak HashSet */
+ private Set ruleSets;
+
+ private Map applicationData;
+
+ private RuleBaseContext ruleBaseContext;
+
+ // @todo: replace this with a weak HashSet
+ /**
+ * WeakHashMap to keep references of WorkingMemories but allow them to be
+ * garbage collected
+ */
private final transient Map workingMemories;
/** Special value when adding to the underlying map. */
private static final Object PRESENT = new Object();
+ // ------------------------------------------------------------
+ // Constructors
+ // ------------------------------------------------------------
+
/**
- * constractor that supplies default conflict resolution
+ * Construct.
*
- * @see LeapsDefaultConflictResolver
+ * @param rete
+ * The rete network.
*/
- public RuleBaseImpl() throws DuplicateRuleNameException,
- InvalidPatternException, InvalidRuleException {
- this(DefaultConflictResolver.getInstance(),
- new DefaultFactHandleFactory(), new HashSet(), new HashMap(),
+ public RuleBaseImpl() throws RuleIntegrationException,
+ RuleSetIntegrationException, FactException, InvalidPatternException {
+ this(new HandleFactory(), new HashSet(), new HashMap(),
new RuleBaseContext());
-
}
- public RuleBaseImpl(ConflictResolver conflictResolver,
- FactHandleFactory factHandleFactory, Set ruleSets,
+ /**
+ * Construct.
+ *
+ * @param rete
+ * The rete network.
+ * @param conflictResolver
+ * The conflict resolver.
+ * @param factHandleFactory
+ * The fact handle factory.
+ * @param ruleSets
+ * @param applicationData
+ */
+ public RuleBaseImpl(FactHandleFactory factHandleFactory, Set ruleSets,
Map applicationData, RuleBaseContext ruleBaseContext)
- throws DuplicateRuleNameException, InvalidPatternException,
- InvalidRuleException {
+ throws RuleIntegrationException, RuleSetIntegrationException,
+ FactException, InvalidPatternException {
+ ObjectTypeResolver resolver = new ClassObjectTypeResolver();
+ this.builder = new Builder(this, resolver);
this.factHandleFactory = factHandleFactory;
- this.conflictResolver = conflictResolver;
+ this.ruleSets = ruleSets;
this.applicationData = applicationData;
this.ruleBaseContext = ruleBaseContext;
this.workingMemories = new WeakHashMap();
- this.builder = new Builder();
-
- this.ruleSets = new HashMap();
- if (ruleSets != null) {
- int i = 0;
- RuleSet ruleSet;
- for (Iterator it = ruleSets.iterator(); it.hasNext(); i++) {
- ruleSet = (RuleSet) it.next();
- this.ruleSets.put(new Integer(i), ruleSet);
- Rule[] rules = ruleSet.getRules();
- for (int k = 0; k < rules.length; k++) {
- this.addRule(rules[k]);
- }
- }
+
+ this.ruleSets = new HashSet();
+ for (Iterator it = ruleSets.iterator(); it.hasNext();) {
+ this.addRuleSet((RuleSet) it.next());
}
- // default one to collect standalone rules
- this.ruleSets.put(defaultRuleSet, new RuleSet(defaultRuleSet,
- this.ruleBaseContext));
}
- /**
- * constractor. Takes conflict resolution class that for each fact and rule
- * sides must not return 0 if o1 != 02
- */
-
- public RuleBaseImpl(ConflictResolver conflictResolver)
- throws DuplicateRuleNameException, InvalidPatternException,
- InvalidRuleException {
- this(conflictResolver, new DefaultFactHandleFactory(), new HashSet(),
- new HashMap(), new RuleBaseContext());
-
- }
+ // ------------------------------------------------------------
+ // Instance methods
+ // ------------------------------------------------------------
/**
- * factory method for new working memory. will keep reference by default.
- * <b>Note:</b> references kept in a week hashmap.
- *
- * @return new working memory instance
- *
- * @see LeapsWorkingMemory
+ * @see RuleBase
*/
public WorkingMemory newWorkingMemory() {
- return this.newWorkingMemory(true);
+ return newWorkingMemory(true);
}
/**
- * factory method for new working memory. will keep reference by default.
- * <b>Note:</b> references kept in a week hashmap.
- *
- * @param keepReference
- * @return new working memory instance
- *
- * @see LeapsWorkingMemory
+ * @see RuleBase
*/
public WorkingMemory newWorkingMemory(boolean keepReference) {
- WorkingMemory workingMemory = new WorkingMemoryImpl(this);
- // process existing rules
+ WorkingMemoryImpl workingMemory = new WorkingMemoryImpl(this);
+ // add all rules added so far
for (Iterator it = this.leapsRules.values().iterator(); it.hasNext();) {
((WorkingMemoryImpl) workingMemory).addLeapsRules((List) it.next());
}
+ //
if (keepReference) {
- this.workingMemories.put(workingMemory, PRESENT);
+ this.workingMemories.put(workingMemory, RuleBaseImpl.PRESENT);
}
return workingMemory;
}
@@ -152,70 +135,127 @@ void disposeWorkingMemory(WorkingMemory workingMemory) {
this.workingMemories.remove(workingMemory);
}
- public Set getWorkingMemories() {
- return this.workingMemories.keySet();
- }
-
/**
- * TODO clash with leaps conflict resolver
+ * TODO do not understand its location here
*
* @see RuleBase
*/
- public org.drools.spi.ConflictResolver getConflictResolver() {
- return (org.drools.spi.ConflictResolver) null;
+ public FactHandleFactory getFactHandleFactory() {
+ return this.factHandleFactory;
}
- public ConflictResolver getLeapsConflictResolver() {
- return this.conflictResolver;
+ public FactHandleFactory newFactHandleFactory() {
+ return this.factHandleFactory.newInstance();
}
/**
- * @see RuleBase
+ * Assert a fact object.
+ *
+ * @param handle
+ * The handle.
+ * @param object
+ * The fact.
+ * @param workingMemory
+ * The working-memory.
+ *
+ * @throws FactException
+ * If an error occurs while performing the assertion.
*/
- public RuleSet[] getRuleSets() {
- return (RuleSet[]) this.ruleSets.values().toArray(
- new RuleSet[this.ruleSets.size()]);
- }
+// void assertObject(FactHandle handle, Object object,
+// PropagationContext context, WorkingMemoryImpl workingMemory)
+// throws FactException {
+// workingMemory.assertObject(object);
+// }
/**
- * Creates leaps rule wrappers and propagate rule to the working memories
+ * Retract a fact object.
*
- * @param rule
- * @throws DuplicateRuleNameException
- * @throws InvalidRuleException
- * @throws InvalidPatternException
+ * @param handle
+ * The handle.
+ * @param workingMemory
+ * The working-memory.
+ *
+ * @throws FactException
+ * If an error occurs while performing the retraction.
*/
- public void addRule(Rule rule) throws DuplicateRuleNameException,
- InvalidRuleException, InvalidPatternException {
- List rules = this.builder.processRule(rule);
-
- this.leapsRules.put(rule, rules);
-
- for(Iterator it = this.workingMemories.keySet().iterator(); it.hasNext();) {
- ((WorkingMemoryImpl)it.next()).addLeapsRules(rules);
- }
+// void retractObject(FactHandle handle, PropagationContext context,
+// WorkingMemoryImpl workingMemory) throws FactException {
+// workingMemory.retractObject(handle);
+// }
+//
+ public RuleSet[] getRuleSets() {
+ return (RuleSet[]) this.ruleSets.toArray(new RuleSet[this.ruleSets
+ .size()]);
}
- /**
- * @see RuleBase
- */
public Map getApplicationData() {
return this.applicationData;
}
- /**
- * @see RuleBase
- */
public RuleBaseContext getRuleBaseContext() {
return this.ruleBaseContext;
}
/**
- * TODO do not understand its location here
+ * Add a <code>RuleSet</code> to the network. Iterates through the
+ * <code>RuleSet</code> adding Each individual <code>Rule</code> to the
+ * network.
*
- * @see RuleBase
+ * @param ruleSet
+ * The rule-set to add.
+ *
+ * @throws RuleIntegrationException
+ * if an error prevents complete construction of the network for
+ * the <code>Rule</code>.
+ * @throws FactException
+ * @throws InvalidPatternException
*/
- public FactHandleFactory getFactHandleFactory() {
- return this.factHandleFactory;
+ public void addRuleSet(RuleSet ruleSet) throws RuleIntegrationException,
+ RuleSetIntegrationException, FactException, InvalidPatternException {
+ Map newApplicationData = ruleSet.getApplicationData();
+
+ // Check that the application data is valid, we cannot change the type
+ // of an already declared application data variable
+ for (Iterator it = newApplicationData.keySet().iterator(); it.hasNext();) {
+ String identifier = (String) it.next();
+ Class type = (Class) newApplicationData.get(identifier);
+ if (this.applicationData.containsKey(identifier)
+ && !this.applicationData.get(identifier).equals(type)) {
+ throw new RuleSetIntegrationException(ruleSet);
+ }
+ }
+ this.applicationData.putAll(newApplicationData);
+
+ this.ruleSets.add(ruleSet);
+
+ Rule[] rules = ruleSet.getRules();
+
+ for (int i = 0; i < rules.length; ++i) {
+ addRule(rules[i]);
+ }
+ }
+
+ /**
+ * Creates leaps rule wrappers and propagate rule to the working memories
+ *
+ * @param rule
+ * @throws FactException
+ * @throws RuleIntegrationException
+ * @throws InvalidPatternException
+ */
+ public void addRule(Rule rule) throws FactException,
+ RuleIntegrationException, InvalidPatternException {
+ List rules = this.builder.processRule(rule);
+
+ this.leapsRules.put(rule, rules);
+
+ for (Iterator it = this.workingMemories.keySet().iterator(); it
+ .hasNext();) {
+ ((WorkingMemoryImpl) it.next()).addLeapsRules(rules);
+ }
+ }
+
+ public Set getWorkingMemories() {
+ return this.workingMemories.keySet();
}
}
|
43faa7747805c8b5fc88bbc54c6af336561e1357
|
kotlin
|
Standard library is imported properly rather than- enclosing the user code--
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend/src/org/jetbrains/jet/lang/resolve/AnalyzingUtils.java b/compiler/frontend/src/org/jetbrains/jet/lang/resolve/AnalyzingUtils.java
index ade95c7d7472e..70e6a3be7f935 100644
--- a/compiler/frontend/src/org/jetbrains/jet/lang/resolve/AnalyzingUtils.java
+++ b/compiler/frontend/src/org/jetbrains/jet/lang/resolve/AnalyzingUtils.java
@@ -68,8 +68,10 @@ public BindingContext analyzeNamespaces(@NotNull Project project, @NotNull Colle
JetScope libraryScope = semanticServices.getStandardLibrary().getLibraryScope();
ModuleDescriptor owner = new ModuleDescriptor("<module>");
- final WritableScope scope = new WritableScopeImpl(libraryScope, owner, new TraceBasedRedeclarationHandler(bindingTraceContext)).setDebugName("Root scope in analyzeNamespace");
+// final WritableScope scope = new WritableScopeImpl(libraryScope, owner, new TraceBasedRedeclarationHandler(bindingTraceContext)).setDebugName("Root scope in analyzeNamespace");
+ final WritableScope scope = new WritableScopeImpl(JetScope.EMPTY, owner, new TraceBasedRedeclarationHandler(bindingTraceContext)).setDebugName("Root scope in analyzeNamespace");
importingStrategy.addImports(project, semanticServices, bindingTraceContext, scope);
+ scope.importScope(libraryScope);
TopDownAnalyzer.process(semanticServices, bindingTraceContext, scope, new NamespaceLike.Adapter(owner) {
@Override
diff --git a/idea/src/org/jetbrains/jet/plugin/JetQuickDocumentationProvider.java b/idea/src/org/jetbrains/jet/plugin/JetQuickDocumentationProvider.java
index 6108b7e0006e2..43ca6abd27f3c 100644
--- a/idea/src/org/jetbrains/jet/plugin/JetQuickDocumentationProvider.java
+++ b/idea/src/org/jetbrains/jet/plugin/JetQuickDocumentationProvider.java
@@ -8,7 +8,6 @@
import org.jetbrains.jet.lang.psi.JetReferenceExpression;
import org.jetbrains.jet.lang.resolve.BindingContext;
import org.jetbrains.jet.lang.resolve.BindingContextUtils;
-import org.jetbrains.jet.lang.resolve.java.AnalyzerFacade;
import org.jetbrains.jet.plugin.compiler.WholeProjectAnalyzerFacade;
import org.jetbrains.jet.resolve.DescriptorRenderer;
|
0856882d2ae0c8aade443918377ea2de8c3db486
|
hadoop
|
svn merge -c 1379565 FIXES: YARN-66. aggregated- logs permissions not set properly (tgraves via bobby)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1379567 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index d542069b04bd7..3515ae06c5659 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -78,3 +78,5 @@ Release 0.23.3 - Unreleased
YARN-60. Fixed a bug in ResourceManager which causes all NMs to get NPEs and
thus causes all containers to be rejected. (vinodkv)
+
+ YARN-66. aggregated logs permissions not set properly (tgraves via bobby)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
index 407fc9ca26e07..008324f013a10 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
@@ -48,6 +48,7 @@
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.file.tfile.TFile;
import org.apache.hadoop.security.UserGroupInformation;
@@ -68,6 +69,13 @@ public class AggregatedLogFormat {
//Maybe write out a list of containerLogs skipped by the retention policy.
private static final int VERSION = 1;
+ /**
+ * Umask for the log file.
+ */
+ private static final FsPermission APP_LOG_FILE_UMASK = FsPermission
+ .createImmutable((short) (0640 ^ 0777));
+
+
static {
RESERVED_KEYS = new HashMap<String, AggregatedLogFormat.LogKey>();
RESERVED_KEYS.put(APPLICATION_ACL_KEY.toString(), APPLICATION_ACL_KEY);
@@ -194,7 +202,9 @@ public LogWriter(final Configuration conf, final Path remoteAppLogFile,
userUgi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
@Override
public FSDataOutputStream run() throws Exception {
- return FileContext.getFileContext(conf).create(
+ FileContext fc = FileContext.getFileContext(conf);
+ fc.setUMask(APP_LOG_FILE_UMASK);
+ return fc.create(
remoteAppLogFile,
EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
new Options.CreateOpts[] {});
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
index ea8c8f79c5fa5..de755a721564e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
@@ -32,7 +32,9 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -100,6 +102,11 @@ public void testReadAcontainerLogs1() throws Exception {
logWriter.append(logKey, logValue);
logWriter.closeWriter();
+ // make sure permission are correct on the file
+ FileStatus fsStatus = fs.getFileStatus(remoteAppLogFile);
+ Assert.assertEquals("permissions on log aggregation file are wrong",
+ FsPermission.createImmutable((short) 0640), fsStatus.getPermission());
+
LogReader logReader = new LogReader(conf, remoteAppLogFile);
LogKey rLogKey = new LogKey();
DataInputStream dis = logReader.next(rLogKey);
@@ -123,6 +130,7 @@ public void testReadAcontainerLogs1() throws Exception {
Assert.assertEquals(expectedLength, s.length());
}
+
private void writeSrcFile(Path srcFilePath, String fileName, long length)
throws IOException {
|
a2507a8f1c4e9b3c8d727653aaed2c4beaefc13d
|
elasticsearch
|
test: fix and simplify logic--
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkAddressTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkAddressTests.java
index 4ccc9f716609e..5847bb75eeb5a 100644
--- a/core/src/test/java/org/elasticsearch/common/network/NetworkAddressTests.java
+++ b/core/src/test/java/org/elasticsearch/common/network/NetworkAddressTests.java
@@ -87,21 +87,13 @@ public void testNoScopeID() throws Exception {
/** creates address without any lookups. hostname can be null, for missing */
private InetAddress forge(String hostname, String address) throws IOException {
- if (hostname == null) {
- return InetAddress.getByName(address);
- } else {
- byte bytes[] = InetAddress.getByName(address).getAddress();
- return InetAddress.getByAddress(hostname, bytes);
- }
+ byte bytes[] = InetAddress.getByName(address).getAddress();
+ return InetAddress.getByAddress(hostname, bytes);
}
/** creates scoped ipv6 address without any lookups. hostname can be null, for missing */
private InetAddress forgeScoped(String hostname, String address, int scopeid) throws IOException {
byte bytes[] = InetAddress.getByName(address).getAddress();
- if (hostname == null) {
- return Inet6Address.getByAddress(hostname, bytes);
- } else {
- return Inet6Address.getByAddress(hostname, bytes, scopeid);
- }
+ return Inet6Address.getByAddress(hostname, bytes, scopeid);
}
}
|
ca6ac1997f045f8093a563fb58b79c01bf7d01c3
|
hadoop
|
HDFS-7144. Fix findbugs warnings in- RamDiskReplicaTracker. (Contributed by Tsz Wo Nicholas Sze)--Conflicts:- hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt-
|
p
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaLruTracker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaLruTracker.java
index 0899e703a9698..78080034ae8c7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaLruTracker.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaLruTracker.java
@@ -40,6 +40,16 @@ private class RamDiskReplicaLru extends RamDiskReplica {
private RamDiskReplicaLru(String bpid, long blockId, FsVolumeImpl ramDiskVolume) {
super(bpid, blockId, ramDiskVolume);
}
+
+ @Override
+ public int hashCode() {
+ return super.hashCode();
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ return super.equals(other);
+ }
}
/**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaTracker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaTracker.java
index 03fc06802747a..24014247377d8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaTracker.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RamDiskReplicaTracker.java
@@ -19,6 +19,9 @@
package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
import com.google.common.base.Preconditions;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -31,6 +34,7 @@
@InterfaceAudience.Private
@InterfaceStability.Unstable
public abstract class RamDiskReplicaTracker {
+ static final Log LOG = LogFactory.getLog(RamDiskReplicaTracker.class);
FsDatasetImpl fsDataset;
@@ -117,18 +121,18 @@ public boolean equals(Object other) {
// Delete the saved meta and block files. Failure to delete can be
// ignored, the directory scanner will retry the deletion later.
void deleteSavedFiles() {
- try {
- if (savedBlockFile != null) {
- savedBlockFile.delete();
- savedBlockFile = null;
+ if (savedBlockFile != null) {
+ if (!savedBlockFile.delete()) {
+ LOG.warn("Failed to delete block file " + savedBlockFile);
}
+ savedBlockFile = null;
+ }
- if (savedMetaFile != null) {
- savedMetaFile.delete();
- savedMetaFile = null;
+ if (savedMetaFile != null) {
+ if (!savedMetaFile.delete()) {
+ LOG.warn("Failed to delete meta file " + savedMetaFile);
}
- } catch (Throwable t) {
- // Ignore any exceptions.
+ savedMetaFile = null;
}
}
|
3e0e71690424ab386f97e3b3f18751748cf52e49
|
intellij-community
|
FrameWrapper: do not restore frame state as- iconified (IDEA-87792)--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/platform-impl/src/com/intellij/openapi/ui/FrameWrapper.java b/platform/platform-impl/src/com/intellij/openapi/ui/FrameWrapper.java
index bde382d88062c..2eac7039600d0 100644
--- a/platform/platform-impl/src/com/intellij/openapi/ui/FrameWrapper.java
+++ b/platform/platform-impl/src/com/intellij/openapi/ui/FrameWrapper.java
@@ -309,7 +309,7 @@ protected void loadFrameState() {
}
}
- if (extendedState == Frame.ICONIFIED || extendedState == Frame.MAXIMIZED_BOTH && frame instanceof JFrame) {
+ if (extendedState == Frame.MAXIMIZED_BOTH && frame instanceof JFrame) {
((JFrame)frame).setExtendedState(extendedState);
}
}
|
6b3023c2aa76386a0d3b437d593bfd65697dc169
|
spring-framework
|
HandlerExecutionChain prevents re-adding the- interceptors array to the list (and declares varargs now)--Issue: SPR-12566-
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-webmvc-portlet/src/main/java/org/springframework/web/portlet/HandlerExecutionChain.java b/spring-webmvc-portlet/src/main/java/org/springframework/web/portlet/HandlerExecutionChain.java
index 80c64e29545f..e09a01986843 100644
--- a/spring-webmvc-portlet/src/main/java/org/springframework/web/portlet/HandlerExecutionChain.java
+++ b/spring-webmvc-portlet/src/main/java/org/springframework/web/portlet/HandlerExecutionChain.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2012 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -21,6 +21,7 @@
import java.util.List;
import org.springframework.util.CollectionUtils;
+import org.springframework.util.ObjectUtils;
/**
* Handler execution chain, consisting of handler object and any handler interceptors.
@@ -45,7 +46,7 @@ public class HandlerExecutionChain {
* @param handler the handler object to execute
*/
public HandlerExecutionChain(Object handler) {
- this(handler, null);
+ this(handler, (HandlerInterceptor[]) null);
}
/**
@@ -54,7 +55,7 @@ public HandlerExecutionChain(Object handler) {
* @param interceptors the array of interceptors to apply
* (in the given order) before the handler itself executes
*/
- public HandlerExecutionChain(Object handler, HandlerInterceptor[] interceptors) {
+ public HandlerExecutionChain(Object handler, HandlerInterceptor... interceptors) {
if (handler instanceof HandlerExecutionChain) {
HandlerExecutionChain originalChain = (HandlerExecutionChain) handler;
this.handler = originalChain.getHandler();
@@ -78,25 +79,25 @@ public Object getHandler() {
}
public void addInterceptor(HandlerInterceptor interceptor) {
- initInterceptorList();
- this.interceptorList.add(interceptor);
+ initInterceptorList().add(interceptor);
}
- public void addInterceptors(HandlerInterceptor[] interceptors) {
- if (interceptors != null) {
- initInterceptorList();
- this.interceptorList.addAll(Arrays.asList(interceptors));
+ public void addInterceptors(HandlerInterceptor... interceptors) {
+ if (!ObjectUtils.isEmpty(interceptors)) {
+ initInterceptorList().addAll(Arrays.asList(interceptors));
}
}
- private void initInterceptorList() {
+ private List<HandlerInterceptor> initInterceptorList() {
if (this.interceptorList == null) {
this.interceptorList = new ArrayList<HandlerInterceptor>();
+ if (this.interceptors != null) {
+ // An interceptor array specified through the constructor
+ this.interceptorList.addAll(Arrays.asList(this.interceptors));
+ }
}
- if (this.interceptors != null) {
- this.interceptorList.addAll(Arrays.asList(this.interceptors));
- this.interceptors = null;
- }
+ this.interceptors = null;
+ return this.interceptorList;
}
/**
diff --git a/spring-webmvc/src/main/java/org/springframework/web/servlet/HandlerExecutionChain.java b/spring-webmvc/src/main/java/org/springframework/web/servlet/HandlerExecutionChain.java
index 06495c3b6fd9..6fab48f20276 100644
--- a/spring-webmvc/src/main/java/org/springframework/web/servlet/HandlerExecutionChain.java
+++ b/spring-webmvc/src/main/java/org/springframework/web/servlet/HandlerExecutionChain.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2012 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -26,6 +26,7 @@
import org.apache.commons.logging.LogFactory;
import org.springframework.util.CollectionUtils;
+import org.springframework.util.ObjectUtils;
/**
* Handler execution chain, consisting of handler object and any handler interceptors.
@@ -53,7 +54,7 @@ public class HandlerExecutionChain {
* @param handler the handler object to execute
*/
public HandlerExecutionChain(Object handler) {
- this(handler, null);
+ this(handler, (HandlerInterceptor[]) null);
}
/**
@@ -62,7 +63,7 @@ public HandlerExecutionChain(Object handler) {
* @param interceptors the array of interceptors to apply
* (in the given order) before the handler itself executes
*/
- public HandlerExecutionChain(Object handler, HandlerInterceptor[] interceptors) {
+ public HandlerExecutionChain(Object handler, HandlerInterceptor... interceptors) {
if (handler instanceof HandlerExecutionChain) {
HandlerExecutionChain originalChain = (HandlerExecutionChain) handler;
this.handler = originalChain.getHandler();
@@ -76,6 +77,7 @@ public HandlerExecutionChain(Object handler, HandlerInterceptor[] interceptors)
}
}
+
/**
* Return the handler object to execute.
* @return the handler object
@@ -85,25 +87,25 @@ public Object getHandler() {
}
public void addInterceptor(HandlerInterceptor interceptor) {
- initInterceptorList();
- this.interceptorList.add(interceptor);
+ initInterceptorList().add(interceptor);
}
- public void addInterceptors(HandlerInterceptor[] interceptors) {
- if (interceptors != null) {
- initInterceptorList();
- this.interceptorList.addAll(Arrays.asList(interceptors));
+ public void addInterceptors(HandlerInterceptor... interceptors) {
+ if (!ObjectUtils.isEmpty(interceptors)) {
+ initInterceptorList().addAll(Arrays.asList(interceptors));
}
}
- private void initInterceptorList() {
+ private List<HandlerInterceptor> initInterceptorList() {
if (this.interceptorList == null) {
this.interceptorList = new ArrayList<HandlerInterceptor>();
+ if (this.interceptors != null) {
+ // An interceptor array specified through the constructor
+ this.interceptorList.addAll(Arrays.asList(this.interceptors));
+ }
}
- if (this.interceptors != null) {
- this.interceptorList.addAll(Arrays.asList(this.interceptors));
- this.interceptors = null;
- }
+ this.interceptors = null;
+ return this.interceptorList;
}
/**
@@ -117,6 +119,7 @@ public HandlerInterceptor[] getInterceptors() {
return this.interceptors;
}
+
/**
* Apply preHandle methods of registered interceptors.
* @return {@code true} if the execution chain should proceed with the
@@ -124,9 +127,10 @@ public HandlerInterceptor[] getInterceptors() {
* that this interceptor has already dealt with the response itself.
*/
boolean applyPreHandle(HttpServletRequest request, HttpServletResponse response) throws Exception {
- if (getInterceptors() != null) {
- for (int i = 0; i < getInterceptors().length; i++) {
- HandlerInterceptor interceptor = getInterceptors()[i];
+ HandlerInterceptor[] interceptors = getInterceptors();
+ if (!ObjectUtils.isEmpty(interceptors)) {
+ for (int i = 0; i < interceptors.length; i++) {
+ HandlerInterceptor interceptor = interceptors[i];
if (!interceptor.preHandle(request, response, this.handler)) {
triggerAfterCompletion(request, response, null);
return false;
@@ -141,12 +145,12 @@ boolean applyPreHandle(HttpServletRequest request, HttpServletResponse response)
* Apply postHandle methods of registered interceptors.
*/
void applyPostHandle(HttpServletRequest request, HttpServletResponse response, ModelAndView mv) throws Exception {
- if (getInterceptors() == null) {
- return;
- }
- for (int i = getInterceptors().length - 1; i >= 0; i--) {
- HandlerInterceptor interceptor = getInterceptors()[i];
- interceptor.postHandle(request, response, this.handler, mv);
+ HandlerInterceptor[] interceptors = getInterceptors();
+ if (!ObjectUtils.isEmpty(interceptors)) {
+ for (int i = interceptors.length - 1; i >= 0; i--) {
+ HandlerInterceptor interceptor = interceptors[i];
+ interceptor.postHandle(request, response, this.handler, mv);
+ }
}
}
@@ -158,16 +162,16 @@ void applyPostHandle(HttpServletRequest request, HttpServletResponse response, M
void triggerAfterCompletion(HttpServletRequest request, HttpServletResponse response, Exception ex)
throws Exception {
- if (getInterceptors() == null) {
- return;
- }
- for (int i = this.interceptorIndex; i >= 0; i--) {
- HandlerInterceptor interceptor = getInterceptors()[i];
- try {
- interceptor.afterCompletion(request, response, this.handler, ex);
- }
- catch (Throwable ex2) {
- logger.error("HandlerInterceptor.afterCompletion threw exception", ex2);
+ HandlerInterceptor[] interceptors = getInterceptors();
+ if (!ObjectUtils.isEmpty(interceptors)) {
+ for (int i = this.interceptorIndex; i >= 0; i--) {
+ HandlerInterceptor interceptor = interceptors[i];
+ try {
+ interceptor.afterCompletion(request, response, this.handler, ex);
+ }
+ catch (Throwable ex2) {
+ logger.error("HandlerInterceptor.afterCompletion threw exception", ex2);
+ }
}
}
}
@@ -176,22 +180,23 @@ void triggerAfterCompletion(HttpServletRequest request, HttpServletResponse resp
* Apply afterConcurrentHandlerStarted callback on mapped AsyncHandlerInterceptors.
*/
void applyAfterConcurrentHandlingStarted(HttpServletRequest request, HttpServletResponse response) {
- if (getInterceptors() == null) {
- return;
- }
- for (int i = getInterceptors().length - 1; i >= 0; i--) {
- if (interceptors[i] instanceof AsyncHandlerInterceptor) {
- try {
- AsyncHandlerInterceptor asyncInterceptor = (AsyncHandlerInterceptor) this.interceptors[i];
- asyncInterceptor.afterConcurrentHandlingStarted(request, response, this.handler);
- }
- catch (Throwable ex) {
- logger.error("Interceptor [" + interceptors[i] + "] failed in afterConcurrentHandlingStarted", ex);
+ HandlerInterceptor[] interceptors = getInterceptors();
+ if (!ObjectUtils.isEmpty(interceptors)) {
+ for (int i = interceptors.length - 1; i >= 0; i--) {
+ if (interceptors[i] instanceof AsyncHandlerInterceptor) {
+ try {
+ AsyncHandlerInterceptor asyncInterceptor = (AsyncHandlerInterceptor) interceptors[i];
+ asyncInterceptor.afterConcurrentHandlingStarted(request, response, this.handler);
+ }
+ catch (Throwable ex) {
+ logger.error("Interceptor [" + interceptors[i] + "] failed in afterConcurrentHandlingStarted", ex);
+ }
}
}
}
}
+
/**
* Delegates to the handler's {@code toString()}.
*/
diff --git a/spring-webmvc/src/test/java/org/springframework/web/servlet/HandlerExecutionChainTests.java b/spring-webmvc/src/test/java/org/springframework/web/servlet/HandlerExecutionChainTests.java
index 9d408be31059..b2aac29ac907 100644
--- a/spring-webmvc/src/test/java/org/springframework/web/servlet/HandlerExecutionChainTests.java
+++ b/spring-webmvc/src/test/java/org/springframework/web/servlet/HandlerExecutionChainTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2013 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -46,6 +46,7 @@ public class HandlerExecutionChainTests {
private AsyncHandlerInterceptor interceptor3;
+
@Before
public void setup() {
this.request = new MockHttpServletRequest();
@@ -60,9 +61,12 @@ public void setup() {
this.chain.addInterceptor(this.interceptor1);
this.chain.addInterceptor(this.interceptor2);
+ assertEquals(2, this.chain.getInterceptors().length);
this.chain.addInterceptor(this.interceptor3);
+ assertEquals(3, this.chain.getInterceptors().length);
}
+
@Test
public void successScenario() throws Exception {
ModelAndView mav = new ModelAndView();
|
f400db0d9b2e1ea03aa26c811f0b8b8ec9cccc61
|
hbase
|
HBASE-8737 [replication] Change replication RPC- to use cell blocks--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1499118 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
index 79eae2064c20..f7562af19770 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
@@ -94,6 +94,23 @@ public Delete(byte [] row, long timestamp) {
this(row, 0, row.length, timestamp);
}
+ /**
+ * Create a Delete operation for the specified row and timestamp.<p>
+ *
+ * If no further operations are done, this will delete all columns in all
+ * families of the specified row with a timestamp less than or equal to the
+ * specified timestamp.<p>
+ *
+ * This timestamp is ONLY used for a delete row operation. If specifying
+ * families or columns, you must specify each timestamp individually.
+ * @param rowArray We make a local copy of this passed in row.
+ * @param rowOffset
+ * @param rowLength
+ */
+ public Delete(final byte [] rowArray, final int rowOffset, final int rowLength) {
+ this(rowArray, rowOffset, rowLength, HConstants.LATEST_TIMESTAMP);
+ }
+
/**
* Create a Delete operation for the specified row and timestamp.<p>
*
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
index be028c644f90..1e388a3b3860 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
@@ -63,6 +63,17 @@ public Put(byte[] row, long ts) {
this(row, 0, row.length, ts);
}
+ /**
+ * We make a copy of the passed in row key to keep local.
+ * @param rowArray
+ * @param rowOffset
+ * @param rowLength
+ * @param ts
+ */
+ public Put(byte [] rowArray, int rowOffset, int rowLength) {
+ this(rowArray, rowOffset, rowLength, HConstants.LATEST_TIMESTAMP);
+ }
+
/**
* We make a copy of the passed in row key to keep local.
* @param rowArray
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
index 13b55a12bf18..a08f375a00ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
@@ -33,10 +33,9 @@
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.io.ByteBufferOutputStream;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
-import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;
-import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;
+import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
@@ -47,49 +46,61 @@
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.Message;
-import com.google.protobuf.TextFormat;
/**
* Utility to help ipc'ing.
*/
class IPCUtil {
public static final Log LOG = LogFactory.getLog(IPCUtil.class);
- private final int cellBlockBuildingInitialBufferSize;
/**
* How much we think the decompressor will expand the original compressed content.
*/
private final int cellBlockDecompressionMultiplier;
+ private final int cellBlockBuildingInitialBufferSize;
private final Configuration conf;
IPCUtil(final Configuration conf) {
super();
this.conf = conf;
- this.cellBlockBuildingInitialBufferSize =
- conf.getInt("hbase.ipc.cellblock.building.initial.buffersize", 16 * 1024);
this.cellBlockDecompressionMultiplier =
conf.getInt("hbase.ipc.cellblock.decompression.buffersize.multiplier", 3);
+ // Guess that 16k is a good size for rpc buffer. Could go bigger. See the TODO below in
+ // #buildCellBlock.
+ this.cellBlockBuildingInitialBufferSize =
+ ClassSize.align(conf.getInt("hbase.ipc.cellblock.building.initial.buffersize", 16 * 1024));
}
/**
- * Build a cell block using passed in <code>codec</code>
+ * Puts CellScanner Cells into a cell block using passed in <code>codec</code> and/or
+ * <code>compressor</code>.
* @param codec
* @param compressor
- * @Param cells
- * @return Null or byte buffer filled with passed-in Cells encoded using passed in
- * <code>codec</code>; the returned buffer has been flipped and is ready for
- * reading. Use limit to find total size.
+ * @Param cellScanner
+ * @return Null or byte buffer filled with a cellblock filled with passed-in Cells encoded using
+ * passed in <code>codec</code> and/or <code>compressor</code>; the returned buffer has been
+ * flipped and is ready for reading. Use limit to find total size.
* @throws IOException
*/
@SuppressWarnings("resource")
ByteBuffer buildCellBlock(final Codec codec, final CompressionCodec compressor,
- final CellScanner cells)
+ final CellScanner cellScanner)
throws IOException {
- if (cells == null) return null;
- // TOOD: Reuse buffers?
- // Presizing doesn't work because can't tell what size will be when serialized.
- // BBOS will resize itself.
- ByteBufferOutputStream baos =
- new ByteBufferOutputStream(this.cellBlockBuildingInitialBufferSize);
+ if (cellScanner == null) return null;
+ int bufferSize = this.cellBlockBuildingInitialBufferSize;
+ if (cellScanner instanceof HeapSize) {
+ long longSize = ((HeapSize)cellScanner).heapSize();
+ // Just make sure we don't have a size bigger than an int.
+ if (longSize > Integer.MAX_VALUE) {
+ throw new IOException("Size " + longSize + " > " + Integer.MAX_VALUE);
+ }
+ bufferSize = ClassSize.align((int)longSize);
+ } // TODO: Else, get estimate on size of buffer rather than have the buffer resize.
+ // See TestIPCUtil main for experiment where we spin through the Cells getting estimate of
+ // total size before creating the buffer. It costs somw small percentage. If we are usually
+ // within the estimated buffer size, then the cost is not worth it. If we are often well
+ // outside the guesstimated buffer size, the processing can be done in half the time if we
+ // go w/ the estimated size rather than let the buffer resize.
+ ByteBufferOutputStream baos = new ByteBufferOutputStream(bufferSize);
OutputStream os = baos;
Compressor poolCompressor = null;
try {
@@ -99,8 +110,8 @@ ByteBuffer buildCellBlock(final Codec codec, final CompressionCodec compressor,
os = compressor.createOutputStream(os, poolCompressor);
}
Codec.Encoder encoder = codec.getEncoder(os);
- while (cells.advance()) {
- encoder.write(cells.current());
+ while (cellScanner.advance()) {
+ encoder.write(cellScanner.current());
}
encoder.flush();
} finally {
@@ -108,9 +119,9 @@ ByteBuffer buildCellBlock(final Codec codec, final CompressionCodec compressor,
if (poolCompressor != null) CodecPool.returnCompressor(poolCompressor);
}
if (LOG.isTraceEnabled()) {
- if (this.cellBlockBuildingInitialBufferSize < baos.size()) {
- LOG.trace("Buffer grew from " + this.cellBlockBuildingInitialBufferSize +
- " to " + baos.size());
+ if (bufferSize < baos.size()) {
+ LOG.trace("Buffer grew from initial bufferSize=" + bufferSize + " to " + baos.size() +
+ "; up hbase.ipc.cellblock.building.initial.buffersize?");
}
}
return baos.getByteBuffer();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
index 9c5d1e50a4f2..92ec9e15c779 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
@@ -1290,4 +1290,4 @@ private static RegionOpenInfo buildRegionOpenInfo(
}
return builder.build();
}
-}
+}
\ No newline at end of file
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java
index 47bee96725f3..1af3810f01f3 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestIPCUtil.java
@@ -23,21 +23,28 @@
import java.nio.ByteBuffer;
import java.util.Arrays;
+import org.apache.commons.lang.time.StopWatch;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.KeyValueCodec;
+import org.apache.hadoop.hbase.io.SizedCellScanner;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.mortbay.log.Log;
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.log4j.Level;
@Category(SmallTests.class)
public class TestIPCUtil {
@@ -49,33 +56,137 @@ public void before() {
@Test
public void testBuildCellBlock() throws IOException {
- doBuildCellBlockUndoCellBlock(new KeyValueCodec(), null);
- doBuildCellBlockUndoCellBlock(new KeyValueCodec(), new DefaultCodec());
- doBuildCellBlockUndoCellBlock(new KeyValueCodec(), new GzipCodec());
+ doBuildCellBlockUndoCellBlock(this.util, new KeyValueCodec(), null);
+ doBuildCellBlockUndoCellBlock(this.util, new KeyValueCodec(), new DefaultCodec());
+ doBuildCellBlockUndoCellBlock(this.util, new KeyValueCodec(), new GzipCodec());
}
- void doBuildCellBlockUndoCellBlock(final Codec codec, final CompressionCodec compressor)
+ static void doBuildCellBlockUndoCellBlock(final IPCUtil util,
+ final Codec codec, final CompressionCodec compressor)
throws IOException {
- final int count = 10;
- Cell [] cells = getCells(count);
- ByteBuffer bb = this.util.buildCellBlock(codec, compressor,
- CellUtil.createCellScanner(Arrays.asList(cells).iterator()));
- CellScanner scanner =
- this.util.createCellScanner(codec, compressor, bb.array(), 0, bb.limit());
+ doBuildCellBlockUndoCellBlock(util, codec, compressor, 10, 1, false);
+ }
+
+ static void doBuildCellBlockUndoCellBlock(final IPCUtil util, final Codec codec,
+ final CompressionCodec compressor, final int count, final int size, final boolean sized)
+ throws IOException {
+ Cell [] cells = getCells(count, size);
+ CellScanner cellScanner = sized? getSizedCellScanner(cells):
+ CellUtil.createCellScanner(Arrays.asList(cells).iterator());
+ ByteBuffer bb = util.buildCellBlock(codec, compressor, cellScanner);
+ cellScanner = util.createCellScanner(codec, compressor, bb.array(), 0, bb.limit());
int i = 0;
- while (scanner.advance()) {
+ while (cellScanner.advance()) {
i++;
}
assertEquals(count, i);
}
+ static CellScanner getSizedCellScanner(final Cell [] cells) {
+ int size = -1;
+ for (Cell cell: cells) {
+ size += CellUtil.estimatedSizeOf(cell);
+ }
+ final int totalSize = ClassSize.align(size);
+ final CellScanner cellScanner = CellUtil.createCellScanner(cells);
+ return new SizedCellScanner() {
+ @Override
+ public long heapSize() {
+ return totalSize;
+ }
+
+ @Override
+ public Cell current() {
+ return cellScanner.current();
+ }
+
+ @Override
+ public boolean advance() throws IOException {
+ return cellScanner.advance();
+ }
+ };
+ }
+
static Cell [] getCells(final int howMany) {
+ return getCells(howMany, 1024);
+ }
+
+ static Cell [] getCells(final int howMany, final int valueSize) {
Cell [] cells = new Cell[howMany];
+ byte [] value = new byte[valueSize];
for (int i = 0; i < howMany; i++) {
byte [] index = Bytes.toBytes(i);
- KeyValue kv = new KeyValue(index, Bytes.toBytes("f"), index, index);
+ KeyValue kv = new KeyValue(index, Bytes.toBytes("f"), index, value);
cells[i] = kv;
}
return cells;
}
+
+ private static final String COUNT = "--count=";
+ private static final String SIZE = "--size=";
+
+ /**
+ * Prints usage and then exits w/ passed <code>errCode</code>
+ * @param errCode
+ */
+ private static void usage(final int errCode) {
+ System.out.println("Usage: IPCUtil [options]");
+ System.out.println("Micro-benchmarking how changed sizes and counts work with buffer resizing");
+ System.out.println(" --count Count of Cells");
+ System.out.println(" --size Size of Cell values");
+ System.out.println("Example: IPCUtil --count=1024 --size=1024");
+ System.exit(errCode);
+ }
+
+ private static void timerTests(final IPCUtil util, final int count, final int size,
+ final Codec codec, final CompressionCodec compressor)
+ throws IOException {
+ final int cycles = 1000;
+ StopWatch timer = new StopWatch();
+ timer.start();
+ for (int i = 0; i < cycles; i++) {
+ timerTest(util, timer, count, size, codec, compressor, false);
+ }
+ timer.stop();
+ Log.info("Codec=" + codec + ", compression=" + compressor + ", sized=" + false +
+ ", count=" + count + ", size=" + size + ", + took=" + timer.getTime() + "ms");
+ timer.reset();
+ timer.start();
+ for (int i = 0; i < cycles; i++) {
+ timerTest(util, timer, count, size, codec, compressor, true);
+ }
+ timer.stop();
+ Log.info("Codec=" + codec + ", compression=" + compressor + ", sized=" + true +
+ ", count=" + count + ", size=" + size + ", + took=" + timer.getTime() + "ms");
+ }
+
+ private static void timerTest(final IPCUtil util, final StopWatch timer, final int count,
+ final int size, final Codec codec, final CompressionCodec compressor, final boolean sized)
+ throws IOException {
+ doBuildCellBlockUndoCellBlock(util, codec, compressor, count, size, sized);
+ }
+
+ /**
+ * For running a few tests of methods herein.
+ * @param args
+ * @throws IOException
+ */
+ public static void main(String[] args) throws IOException {
+ int count = 1024;
+ int size = 10240;
+ for (String arg: args) {
+ if (arg.startsWith(COUNT)) {
+ count = Integer.parseInt(arg.replace(COUNT, ""));
+ } else if (arg.startsWith(SIZE)) {
+ size = Integer.parseInt(arg.replace(SIZE, ""));
+ } else {
+ usage(1);
+ }
+ }
+ IPCUtil util = new IPCUtil(HBaseConfiguration.create());
+ ((Log4JLogger)IPCUtil.LOG).getLogger().setLevel(Level.ALL);
+ timerTests(util, count, size, new KeyValueCodec(), null);
+ timerTests(util, count, size, new KeyValueCodec(), new DefaultCodec());
+ timerTests(util, count, size, new KeyValueCodec(), new GzipCodec());
+ }
}
\ No newline at end of file
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index c677a0d11796..230d48adeaaf 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -28,6 +28,7 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.ByteRange;
+import org.apache.hadoop.hbase.util.Bytes;
/**
* Utility methods helpful slinging {@link Cell} instances.
@@ -242,4 +243,44 @@ public boolean advance() {
}
};
}
-}
+
+ /**
+ * @param left
+ * @param right
+ * @return True if the rows in <code>left</code> and <code>right</code> Cells match
+ */
+ public static boolean matchingRow(final Cell left, final Cell right) {
+ return Bytes.equals(left.getRowArray(), left.getRowOffset(), left.getRowLength(),
+ right.getRowArray(), right.getRowOffset(), right.getRowLength());
+ }
+
+ /**
+ * @return True if a delete type, a {@link KeyValue.Type#Delete} or
+ * a {KeyValue.Type#DeleteFamily} or a {@link KeyValue.Type#DeleteColumn}
+ * KeyValue type.
+ */
+ public static boolean isDelete(final Cell cell) {
+ return KeyValue.isDelete(cell.getTypeByte());
+ }
+
+ /**
+ * @param cell
+ * @return Estimate of the <code>cell</code> size in bytes.
+ */
+ public static int estimatedSizeOf(final Cell cell) {
+ // If a KeyValue, we can give a good estimate of size.
+ if (cell instanceof KeyValue) {
+ return ((KeyValue)cell).getLength() + Bytes.SIZEOF_INT;
+ }
+ // TODO: Should we add to Cell a sizeOf? Would it help? Does it make sense if Cell is
+ // prefix encoded or compressed?
+ return cell.getRowLength() + cell.getFamilyLength() +
+ cell.getQualifierLength() +
+ cell.getValueLength() +
+ // Use the KeyValue's infrastructure size presuming that another implementation would have
+ // same basic cost.
+ KeyValue.KEY_INFRASTRUCTURE_SIZE +
+ // Serialization is probably preceded by a length (it is in the KeyValueCodec at least).
+ Bytes.SIZEOF_INT;
+ }
+}
\ No newline at end of file
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index a589cceba88a..e32163fe3fd0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -20,9 +20,7 @@
package org.apache.hadoop.hbase;
import java.io.DataInput;
-import java.io.DataInputStream;
import java.io.DataOutput;
-import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@@ -37,7 +35,6 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java
index 10ff57c54502..23b172d47201 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/HeapSize.java
@@ -46,5 +46,4 @@ public interface HeapSize {
* count of payload and hosting object sizings.
*/
public long heapSize();
-
-}
+}
\ No newline at end of file
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/SizedCellScanner.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/SizedCellScanner.java
new file mode 100644
index 000000000000..0206e05e9acd
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/SizedCellScanner.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import org.apache.hadoop.hbase.CellScanner;
+
+/**
+ * A CellScanner that knows its size in memory in bytes.
+ * Used playing the CellScanner into an in-memory buffer; knowing the size ahead of time saves
+ * on background buffer resizings.
+ */
[email protected]
[email protected]
+public interface SizedCellScanner extends CellScanner, HeapSize {}
\ No newline at end of file
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java
index da5d121b3feb..a3bd954ad693 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java
@@ -9894,6 +9894,10 @@ public interface WALEntryOrBuilder
java.util.List<com.google.protobuf.ByteString> getKeyValueBytesList();
int getKeyValueBytesCount();
com.google.protobuf.ByteString getKeyValueBytes(int index);
+
+ // optional int32 associatedCellCount = 3;
+ boolean hasAssociatedCellCount();
+ int getAssociatedCellCount();
}
public static final class WALEntry extends
com.google.protobuf.GeneratedMessage
@@ -9951,9 +9955,20 @@ public com.google.protobuf.ByteString getKeyValueBytes(int index) {
return keyValueBytes_.get(index);
}
+ // optional int32 associatedCellCount = 3;
+ public static final int ASSOCIATEDCELLCOUNT_FIELD_NUMBER = 3;
+ private int associatedCellCount_;
+ public boolean hasAssociatedCellCount() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public int getAssociatedCellCount() {
+ return associatedCellCount_;
+ }
+
private void initFields() {
key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance();
keyValueBytes_ = java.util.Collections.emptyList();;
+ associatedCellCount_ = 0;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -9981,6 +9996,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
for (int i = 0; i < keyValueBytes_.size(); i++) {
output.writeBytes(2, keyValueBytes_.get(i));
}
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeInt32(3, associatedCellCount_);
+ }
getUnknownFields().writeTo(output);
}
@@ -10003,6 +10021,10 @@ public int getSerializedSize() {
size += dataSize;
size += 1 * getKeyValueBytesList().size();
}
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(3, associatedCellCount_);
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
@@ -10033,6 +10055,11 @@ public boolean equals(final java.lang.Object obj) {
}
result = result && getKeyValueBytesList()
.equals(other.getKeyValueBytesList());
+ result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount());
+ if (hasAssociatedCellCount()) {
+ result = result && (getAssociatedCellCount()
+ == other.getAssociatedCellCount());
+ }
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
@@ -10050,6 +10077,10 @@ public int hashCode() {
hash = (37 * hash) + KEYVALUEBYTES_FIELD_NUMBER;
hash = (53 * hash) + getKeyValueBytesList().hashCode();
}
+ if (hasAssociatedCellCount()) {
+ hash = (37 * hash) + ASSOCIATEDCELLCOUNT_FIELD_NUMBER;
+ hash = (53 * hash) + getAssociatedCellCount();
+ }
hash = (29 * hash) + getUnknownFields().hashCode();
return hash;
}
@@ -10175,6 +10206,8 @@ public Builder clear() {
bitField0_ = (bitField0_ & ~0x00000001);
keyValueBytes_ = java.util.Collections.emptyList();;
bitField0_ = (bitField0_ & ~0x00000002);
+ associatedCellCount_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
@@ -10226,6 +10259,10 @@ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry buildPart
bitField0_ = (bitField0_ & ~0x00000002);
}
result.keyValueBytes_ = keyValueBytes_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.associatedCellCount_ = associatedCellCount_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
@@ -10255,6 +10292,9 @@ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.
}
onChanged();
}
+ if (other.hasAssociatedCellCount()) {
+ setAssociatedCellCount(other.getAssociatedCellCount());
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
@@ -10308,6 +10348,11 @@ public Builder mergeFrom(
keyValueBytes_.add(input.readBytes());
break;
}
+ case 24: {
+ bitField0_ |= 0x00000004;
+ associatedCellCount_ = input.readInt32();
+ break;
+ }
}
}
}
@@ -10455,6 +10500,27 @@ public Builder clearKeyValueBytes() {
return this;
}
+ // optional int32 associatedCellCount = 3;
+ private int associatedCellCount_ ;
+ public boolean hasAssociatedCellCount() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public int getAssociatedCellCount() {
+ return associatedCellCount_;
+ }
+ public Builder setAssociatedCellCount(int value) {
+ bitField0_ |= 0x00000004;
+ associatedCellCount_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearAssociatedCellCount() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ associatedCellCount_ = 0;
+ onChanged();
+ return this;
+ }
+
// @@protoc_insertion_point(builder_scope:WALEntry)
}
@@ -15359,41 +15425,41 @@ public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse
"gionsRequest\022!\n\007regionA\030\001 \002(\0132\020.RegionSp" +
"ecifier\022!\n\007regionB\030\002 \002(\0132\020.RegionSpecifi" +
"er\022\027\n\010forcible\030\003 \001(\010:\005false\"\026\n\024MergeRegi" +
- "onsResponse\"7\n\010WALEntry\022\024\n\003key\030\001 \002(\0132\007.W",
- "ALKey\022\025\n\rkeyValueBytes\030\002 \003(\014\"4\n\030Replicat" +
- "eWALEntryRequest\022\030\n\005entry\030\001 \003(\0132\t.WALEnt" +
- "ry\"\033\n\031ReplicateWALEntryResponse\"\026\n\024RollW" +
- "ALWriterRequest\".\n\025RollWALWriterResponse" +
- "\022\025\n\rregionToFlush\030\001 \003(\014\"#\n\021StopServerReq" +
- "uest\022\016\n\006reason\030\001 \002(\t\"\024\n\022StopServerRespon" +
- "se\"\026\n\024GetServerInfoRequest\"@\n\nServerInfo" +
- "\022\037\n\nserverName\030\001 \002(\0132\013.ServerName\022\021\n\tweb" +
- "uiPort\030\002 \001(\r\"8\n\025GetServerInfoResponse\022\037\n" +
- "\nserverInfo\030\001 \002(\0132\013.ServerInfo2\337\006\n\014Admin",
- "Service\022>\n\rgetRegionInfo\022\025.GetRegionInfo" +
- "Request\032\026.GetRegionInfoResponse\022;\n\014getSt" +
- "oreFile\022\024.GetStoreFileRequest\032\025.GetStore" +
- "FileResponse\022D\n\017getOnlineRegion\022\027.GetOnl" +
- "ineRegionRequest\032\030.GetOnlineRegionRespon" +
- "se\0225\n\nopenRegion\022\022.OpenRegionRequest\032\023.O" +
- "penRegionResponse\0228\n\013closeRegion\022\023.Close" +
- "RegionRequest\032\024.CloseRegionResponse\0228\n\013f" +
- "lushRegion\022\023.FlushRegionRequest\032\024.FlushR" +
- "egionResponse\0228\n\013splitRegion\022\023.SplitRegi",
- "onRequest\032\024.SplitRegionResponse\022>\n\rcompa" +
- "ctRegion\022\025.CompactRegionRequest\032\026.Compac" +
- "tRegionResponse\022;\n\014mergeRegions\022\024.MergeR" +
- "egionsRequest\032\025.MergeRegionsResponse\022J\n\021" +
- "replicateWALEntry\022\031.ReplicateWALEntryReq" +
- "uest\032\032.ReplicateWALEntryResponse\022\'\n\006repl" +
- "ay\022\r.MultiRequest\032\016.MultiResponse\022>\n\rrol" +
- "lWALWriter\022\025.RollWALWriterRequest\032\026.Roll" +
- "WALWriterResponse\022>\n\rgetServerInfo\022\025.Get" +
- "ServerInfoRequest\032\026.GetServerInfoRespons",
- "e\0225\n\nstopServer\022\022.StopServerRequest\032\023.St" +
- "opServerResponseBA\n*org.apache.hadoop.hb" +
- "ase.protobuf.generatedB\013AdminProtosH\001\210\001\001" +
- "\240\001\001"
+ "onsResponse\"T\n\010WALEntry\022\024\n\003key\030\001 \002(\0132\007.W",
+ "ALKey\022\025\n\rkeyValueBytes\030\002 \003(\014\022\033\n\023associat" +
+ "edCellCount\030\003 \001(\005\"4\n\030ReplicateWALEntryRe" +
+ "quest\022\030\n\005entry\030\001 \003(\0132\t.WALEntry\"\033\n\031Repli" +
+ "cateWALEntryResponse\"\026\n\024RollWALWriterReq" +
+ "uest\".\n\025RollWALWriterResponse\022\025\n\rregionT" +
+ "oFlush\030\001 \003(\014\"#\n\021StopServerRequest\022\016\n\006rea" +
+ "son\030\001 \002(\t\"\024\n\022StopServerResponse\"\026\n\024GetSe" +
+ "rverInfoRequest\"@\n\nServerInfo\022\037\n\nserverN" +
+ "ame\030\001 \002(\0132\013.ServerName\022\021\n\twebuiPort\030\002 \001(" +
+ "\r\"8\n\025GetServerInfoResponse\022\037\n\nserverInfo",
+ "\030\001 \002(\0132\013.ServerInfo2\337\006\n\014AdminService\022>\n\r" +
+ "getRegionInfo\022\025.GetRegionInfoRequest\032\026.G" +
+ "etRegionInfoResponse\022;\n\014getStoreFile\022\024.G" +
+ "etStoreFileRequest\032\025.GetStoreFileRespons" +
+ "e\022D\n\017getOnlineRegion\022\027.GetOnlineRegionRe" +
+ "quest\032\030.GetOnlineRegionResponse\0225\n\nopenR" +
+ "egion\022\022.OpenRegionRequest\032\023.OpenRegionRe" +
+ "sponse\0228\n\013closeRegion\022\023.CloseRegionReque" +
+ "st\032\024.CloseRegionResponse\0228\n\013flushRegion\022" +
+ "\023.FlushRegionRequest\032\024.FlushRegionRespon",
+ "se\0228\n\013splitRegion\022\023.SplitRegionRequest\032\024" +
+ ".SplitRegionResponse\022>\n\rcompactRegion\022\025." +
+ "CompactRegionRequest\032\026.CompactRegionResp" +
+ "onse\022;\n\014mergeRegions\022\024.MergeRegionsReque" +
+ "st\032\025.MergeRegionsResponse\022J\n\021replicateWA" +
+ "LEntry\022\031.ReplicateWALEntryRequest\032\032.Repl" +
+ "icateWALEntryResponse\022\'\n\006replay\022\r.MultiR" +
+ "equest\032\016.MultiResponse\022>\n\rrollWALWriter\022" +
+ "\025.RollWALWriterRequest\032\026.RollWALWriterRe" +
+ "sponse\022>\n\rgetServerInfo\022\025.GetServerInfoR",
+ "equest\032\026.GetServerInfoResponse\0225\n\nstopSe" +
+ "rver\022\022.StopServerRequest\032\023.StopServerRes" +
+ "ponseBA\n*org.apache.hadoop.hbase.protobu" +
+ "f.generatedB\013AdminProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -15557,7 +15623,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors(
internal_static_WALEntry_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_WALEntry_descriptor,
- new java.lang.String[] { "Key", "KeyValueBytes", },
+ new java.lang.String[] { "Key", "KeyValueBytes", "AssociatedCellCount", },
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class,
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class);
internal_static_ReplicateWALEntryRequest_descriptor =
diff --git a/hbase-protocol/src/main/protobuf/Admin.proto b/hbase-protocol/src/main/protobuf/Admin.proto
index 733eadff5712..b3c23af28e61 100644
--- a/hbase-protocol/src/main/protobuf/Admin.proto
+++ b/hbase-protocol/src/main/protobuf/Admin.proto
@@ -161,14 +161,18 @@ message MergeRegionsResponse {
// Protocol buffer version of WAL for replication
message WALEntry {
required WALKey key = 1;
+ // Following may be null if the KVs/Cells are carried along the side in a cellblock (See
+ // RPC for more on cellblocks). If Cells/KVs are in a cellblock, this next field is null
+ // and associatedCellCount has count of Cells associated w/ this WALEntry
repeated bytes keyValueBytes = 2;
+ // If Cell data is carried alongside in a cellblock, this is count of Cells in the cellblock.
+ optional int32 associatedCellCount = 3;
}
/**
* Replicates the given entries. The guarantee is that the given entries
* will be durable on the slave cluster if this method returns without
- * any exception.
- * hbase.replication has to be set to true for this to work.
+ * any exception. hbase.replication has to be set to true for this to work.
*/
message ReplicateWALEntryRequest {
repeated WALEntry entry = 1;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
index 29f01541443d..0bcd53500733 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
@@ -20,26 +20,32 @@
package org.apache.hadoop.hbase.protobuf;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.ServiceException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.TreeMap;
+import java.util.UUID;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.io.SizedCellScanner;
+import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.TreeMap;
-import java.util.UUID;
+import com.google.protobuf.ByteString;
+import com.google.protobuf.ServiceException;
public class ReplicationProtbufUtil {
/**
@@ -81,10 +87,11 @@ public class ReplicationProtbufUtil {
*/
public static void replicateWALEntry(final AdminService.BlockingInterface admin,
final HLog.Entry[] entries) throws IOException {
- AdminProtos.ReplicateWALEntryRequest request =
+ Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner> p =
buildReplicateWALEntryRequest(entries);
try {
- admin.replicateWALEntry(null, request);
+ PayloadCarryingRpcController controller = new PayloadCarryingRpcController(p.getSecond());
+ admin.replicateWALEntry(controller, p.getFirst());
} catch (ServiceException se) {
throw ProtobufUtil.getRemoteException(se);
}
@@ -94,10 +101,14 @@ public static void replicateWALEntry(final AdminService.BlockingInterface admin,
* Create a new ReplicateWALEntryRequest from a list of HLog entries
*
* @param entries the HLog entries to be replicated
- * @return a ReplicateWALEntryRequest
+ * @return a pair of ReplicateWALEntryRequest and a CellScanner over all the WALEdit values
+ * found.
*/
- public static AdminProtos.ReplicateWALEntryRequest
+ public static Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner>
buildReplicateWALEntryRequest(final HLog.Entry[] entries) {
+ // Accumulate all the KVs seen in here.
+ List<List<? extends Cell>> allkvs = new ArrayList<List<? extends Cell>>(entries.length);
+ int size = 0;
WALProtos.FamilyScope.Builder scopeBuilder = WALProtos.FamilyScope.newBuilder();
AdminProtos.WALEntry.Builder entryBuilder = AdminProtos.WALEntry.newBuilder();
AdminProtos.ReplicateWALEntryRequest.Builder builder =
@@ -128,13 +139,55 @@ public static void replicateWALEntry(final AdminService.BlockingInterface admin,
keyBuilder.addScopes(scopeBuilder.build());
}
}
- List<KeyValue> keyValues = edit.getKeyValues();
- for (KeyValue value: keyValues) {
- entryBuilder.addKeyValueBytes(ByteString.copyFrom(
- value.getBuffer(), value.getOffset(), value.getLength()));
+ List<KeyValue> kvs = edit.getKeyValues();
+ // Add up the size. It is used later serializing out the kvs.
+ for (KeyValue kv: kvs) {
+ size += kv.getLength();
}
+ // Collect up the kvs
+ allkvs.add(kvs);
+ // Write out how many kvs associated with this entry.
+ entryBuilder.setAssociatedCellCount(kvs.size());
builder.addEntry(entryBuilder.build());
}
- return builder.build();
+ return new Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner>(builder.build(),
+ getCellScanner(allkvs, size));
+ }
+
+ /**
+ * @param cells
+ * @return <code>cells</code> packaged as a CellScanner
+ */
+ static CellScanner getCellScanner(final List<List<? extends Cell>> cells, final int size) {
+ return new SizedCellScanner() {
+ private final Iterator<List<? extends Cell>> entries = cells.iterator();
+ private Iterator<? extends Cell> currentIterator = null;
+ private Cell currentCell;
+
+ @Override
+ public Cell current() {
+ return this.currentCell;
+ }
+
+ @Override
+ public boolean advance() {
+ if (this.currentIterator == null) {
+ if (!this.entries.hasNext()) return false;
+ this.currentIterator = this.entries.next().iterator();
+ }
+ if (this.currentIterator.hasNext()) {
+ this.currentCell = this.currentIterator.next();
+ return true;
+ }
+ this.currentCell = null;
+ this.currentIterator = null;
+ return advance();
+ }
+
+ @Override
+ public long heapSize() {
+ return size;
+ }
+ };
}
-}
+}
\ No newline at end of file
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index f288d887b544..cd80babc9af5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -2219,8 +2219,7 @@ static private void createNewReplicationInstance(Configuration conf,
conf, server, fs, logDir, oldLogDir);
server.replicationSinkHandler = (ReplicationSinkService)
server.replicationSourceHandler;
- }
- else {
+ } else {
server.replicationSourceHandler = (ReplicationSourceService)
newReplicationInstance(sourceClassname,
conf, server, fs, logDir, oldLogDir);
@@ -3715,15 +3714,14 @@ public CompactRegionResponse compactRegion(final RpcController controller,
@Override
@QosPriority(priority=HConstants.REPLICATION_QOS)
public ReplicateWALEntryResponse replicateWALEntry(final RpcController controller,
- final ReplicateWALEntryRequest request) throws ServiceException {
+ final ReplicateWALEntryRequest request)
+ throws ServiceException {
try {
if (replicationSinkHandler != null) {
checkOpen();
requestCount.increment();
- HLog.Entry[] entries = ReplicationProtbufUtil.toHLogEntries(request.getEntryList());
- if (entries != null && entries.length > 0) {
- replicationSinkHandler.replicateLogEntries(entries);
- }
+ this.replicationSinkHandler.replicateLogEntries(request.getEntryList(),
+ ((PayloadCarryingRpcController)controller).cellScanner());
}
return ReplicateWALEntryResponse.newBuilder().build();
} catch (IOException ie) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java
index 754cff0b0585..28573bd0b351 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java
@@ -19,9 +19,11 @@
package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
+import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.regionserver.wal.HLog;
+import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
/**
* A sink for a replication stream has to expose this service.
@@ -30,11 +32,11 @@
*/
@InterfaceAudience.Private
public interface ReplicationSinkService extends ReplicationService {
-
- /**
+ /**
* Carry on the list of log entries down to the sink
- * @param entries list of entries to replicate
+ * @param entries list of WALEntries to replicate
+ * @param cells Cells that the WALEntries refer to (if cells is non-null)
* @throws IOException
*/
- public void replicateLogEntries(HLog.Entry[] entries) throws IOException;
-}
+ public void replicateLogEntries(List<WALEntry> entries, CellScanner cells) throws IOException;
+}
\ No newline at end of file
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSourceService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSourceService.java
index b9083239fd44..edc5c6ad32a4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSourceService.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSourceService.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import java.io.IOException;
-
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
@@ -30,10 +28,9 @@
*/
@InterfaceAudience.Private
public interface ReplicationSourceService extends ReplicationService {
-
/**
* Returns a WALObserver for the service. This is needed to
* observe log rolls and log archival events.
*/
public WALActionsListener getWALActionsListener();
-}
+}
\ No newline at end of file
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
index 7b84ccdfb257..387f44dd0df0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.hbase.replication.regionserver;
import java.io.IOException;
+import java.util.List;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.concurrent.Executors;
@@ -33,13 +34,14 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
import org.apache.hadoop.hbase.regionserver.ReplicationSourceService;
import org.apache.hadoop.hbase.regionserver.ReplicationSinkService;
-import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
@@ -163,11 +165,14 @@ public void join() {
/**
* Carry on the list of log entries down to the sink
* @param entries list of entries to replicate
+ * @param cells The data -- the cells -- that <code>entries</code> describes (the entries
+ * do not contain the Cells we are replicating; they are passed here on the side in this
+ * CellScanner).
* @throws IOException
*/
- public void replicateLogEntries(HLog.Entry[] entries) throws IOException {
+ public void replicateLogEntries(List<WALEntry> entries, CellScanner cells) throws IOException {
if (this.replication) {
- this.replicationSink.replicateEntries(entries);
+ this.replicationSink.replicateEntries(entries, cells);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
index 67d921ce4c85..0e98b21794c2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
@@ -34,19 +34,23 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
-import org.apache.hadoop.hbase.regionserver.wal.HLog;
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
@@ -108,17 +112,17 @@ private void decorateConf() {
}
/**
- * Replicate this array of entries directly into the local cluster
- * using the native client.
+ * Replicate this array of entries directly into the local cluster using the native client.
+ * Like {@link #replicateEntries(org.apache.hadoop.hbase.regionserver.wal.HLog.Entry[])} only
+ * operates against raw protobuf type saving on a convertion from pb to pojo.
*
* @param entries
+ * @param cells
* @throws IOException
*/
- public void replicateEntries(HLog.Entry[] entries)
- throws IOException {
- if (entries.length == 0) {
- return;
- }
+ public void replicateEntries(List<WALEntry> entries, final CellScanner cells) throws IOException {
+ if (entries.isEmpty()) return;
+ if (cells == null) throw new NullPointerException("TODO: Add handling of null CellScanner");
// Very simple optimization where we batch sequences of rows going
// to the same table.
try {
@@ -126,40 +130,41 @@ public void replicateEntries(HLog.Entry[] entries)
// Map of table => list of Rows, we only want to flushCommits once per
// invocation of this method per table.
Map<byte[], List<Row>> rows = new TreeMap<byte[], List<Row>>(Bytes.BYTES_COMPARATOR);
- for (HLog.Entry entry : entries) {
- WALEdit edit = entry.getEdit();
- byte[] table = entry.getKey().getTablename();
- Put put = null;
- Delete del = null;
- KeyValue lastKV = null;
- List<KeyValue> kvs = edit.getKeyValues();
- for (KeyValue kv : kvs) {
- if (lastKV == null || lastKV.getType() != kv.getType() || !lastKV.matchingRow(kv)) {
- if (kv.isDelete()) {
- del = new Delete(kv.getRow());
- del.setClusterId(entry.getKey().getClusterId());
- addToMultiMap(rows, table, del);
- } else {
- put = new Put(kv.getRow());
- put.setClusterId(entry.getKey().getClusterId());
- addToMultiMap(rows, table, put);
- }
+ for (WALEntry entry : entries) {
+ byte[] table = entry.getKey().getTableName().toByteArray();
+ Cell previousCell = null;
+ Mutation m = null;
+ java.util.UUID uuid = toUUID(entry.getKey().getClusterId());
+ int count = entry.getAssociatedCellCount();
+ for (int i = 0; i < count; i++) {
+ // Throw index out of bounds if our cell count is off
+ if (!cells.advance()) {
+ throw new ArrayIndexOutOfBoundsException("Expected=" + count + ", index=" + i);
}
- if (kv.isDelete()) {
- del.addDeleteMarker(kv);
+ Cell cell = cells.current();
+ if (isNewRowOrType(previousCell, cell)) {
+ // Create new mutation
+ m = CellUtil.isDelete(cell)?
+ new Delete(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()):
+ new Put(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
+ m.setClusterId(uuid);
+ addToMultiMap(rows, table, m);
+ }
+ if (CellUtil.isDelete(cell)) {
+ ((Delete)m).addDeleteMarker(KeyValueUtil.ensureKeyValue(cell));
} else {
- put.add(kv);
+ ((Put)m).add(KeyValueUtil.ensureKeyValue(cell));
}
- lastKV = kv;
+ previousCell = cell;
}
totalReplicated++;
}
for (Entry<byte[], List<Row>> entry : rows.entrySet()) {
batch(entry.getKey(), entry.getValue());
}
- this.metrics.setAgeOfLastAppliedOp(
- entries[entries.length-1].getKey().getWriteTime());
- this.metrics.applyBatch(entries.length);
+ int size = entries.size();
+ this.metrics.setAgeOfLastAppliedOp(entries.get(size - 1).getKey().getWriteTime());
+ this.metrics.applyBatch(size);
this.totalReplicatedEdits.addAndGet(totalReplicated);
} catch (IOException ex) {
LOG.error("Unable to accept edit because:", ex);
@@ -167,6 +172,20 @@ public void replicateEntries(HLog.Entry[] entries)
}
}
+ /**
+ * @param previousCell
+ * @param cell
+ * @return True if we have crossed over onto a new row or type
+ */
+ private boolean isNewRowOrType(final Cell previousCell, final Cell cell) {
+ return previousCell == null || previousCell.getTypeByte() != cell.getTypeByte() ||
+ !CellUtil.matchingRow(previousCell, cell);
+ }
+
+ private java.util.UUID toUUID(final HBaseProtos.UUID uuid) {
+ return new java.util.UUID(uuid.getMostSigBits(), uuid.getLeastSigBits());
+ }
+
/**
* Simple helper to a map from key to (a list of) values
* TODO: Make a general utility method
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
index fb15ce436682..bf9266b5f65e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
@@ -177,6 +177,9 @@ public void init(final Configuration conf,
new PriorityBlockingQueue<Path>(
conf.getInt("hbase.regionserver.maxlogs", 32),
new LogsComparator());
+ // TODO: This connection is replication specific or we should make it particular to
+ // replication and make replication specific settings such as compression or codec to use
+ // passing Cells.
this.conn = HConnectionManager.getConnection(conf);
this.zkHelper = manager.getRepZkWrapper();
this.ratio = this.conf.getFloat("replication.source.ratio", 0.1f);
@@ -456,7 +459,6 @@ private void connectToPeers() {
// Connect to peer cluster first, unless we have to stop
while (this.isActive() && this.currentPeers.size() == 0) {
-
chooseSinks();
if (this.isActive() && this.currentPeers.size() == 0) {
if (sleepForRetries("Waiting for peers", sleepMultiplier)) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
new file mode 100644
index 000000000000..6aea022eec6a
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.protobuf;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+
+@Category(SmallTests.class)
+public class TestReplicationProtobuf {
+ /**
+ * Little test to check we can basically convert list of a list of KVs into a CellScanner
+ * @throws IOException
+ */
+ @Test
+ public void testGetCellScanner() throws IOException {
+ List<KeyValue> a = new ArrayList<KeyValue>();
+ KeyValue akv = new KeyValue(Bytes.toBytes("a"), -1L);
+ a.add(akv);
+ // Add a few just to make it less regular.
+ a.add(new KeyValue(Bytes.toBytes("aa"), -1L));
+ a.add(new KeyValue(Bytes.toBytes("aaa"), -1L));
+ List<KeyValue> b = new ArrayList<KeyValue>();
+ KeyValue bkv = new KeyValue(Bytes.toBytes("b"), -1L);
+ a.add(bkv);
+ List<KeyValue> c = new ArrayList<KeyValue>();
+ KeyValue ckv = new KeyValue(Bytes.toBytes("c"), -1L);
+ c.add(ckv);
+ List<List<? extends Cell>> all = new ArrayList<List<? extends Cell>>();
+ all.add(a);
+ all.add(b);
+ all.add(c);
+ CellScanner scanner = ReplicationProtbufUtil.getCellScanner(all, 0);
+ testAdvancetHasSameRow(scanner, akv);
+ // Skip over aa
+ scanner.advance();
+ // Skip over aaa
+ scanner.advance();
+ testAdvancetHasSameRow(scanner, bkv);
+ testAdvancetHasSameRow(scanner, ckv);
+ assertFalse(scanner.advance());
+ }
+
+ private void testAdvancetHasSameRow(CellScanner scanner, final KeyValue kv) throws IOException {
+ scanner.advance();
+ assertTrue(Bytes.equals(scanner.current().getRowArray(), scanner.current().getRowOffset(),
+ scanner.current().getRowLength(),
+ kv.getRowArray(), kv.getRowOffset(), kv.getRowLength()));
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
index 61ae5b58f887..ddbd56d79f6b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
@@ -449,13 +449,14 @@ public void loadTesting() throws Exception {
scan = new Scan();
+ long start = System.currentTimeMillis();
for (int i = 0; i < NB_RETRIES; i++) {
scanner = htable2.getScanner(scan);
res = scanner.next(NB_ROWS_IN_BIG_BATCH);
scanner.close();
if (res.length != NB_ROWS_IN_BIG_BATCH) {
- if (i == NB_RETRIES-1) {
+ if (i == NB_RETRIES - 1) {
int lastRow = -1;
for (Result result : res) {
int currentRow = Bytes.toInt(result.getRow());
@@ -465,8 +466,9 @@ public void loadTesting() throws Exception {
lastRow = currentRow;
}
LOG.error("Last row: " + lastRow);
- fail("Waited too much time for normal batch replication, "
- + res.length + " instead of " + NB_ROWS_IN_BIG_BATCH);
+ fail("Waited too much time for normal batch replication, " +
+ res.length + " instead of " + NB_ROWS_IN_BIG_BATCH + "; waited=" +
+ (System.currentTimeMillis() - start) + "ms");
} else {
LOG.info("Only got " + res.length + " rows");
Thread.sleep(SLEEP_TIME);
|
e54358472c94d63c66ad607be256f94378e8ff16
|
orientdb
|
Issue -2900, OFlatDatabase was removed.--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseBinary.java b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseBinary.java
deleted file mode 100644
index 2ec9a2144fb..00000000000
--- a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseBinary.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- *
- * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
- * *
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * *
- * * For more information: http://www.orientechnologies.com
- *
- */
-package com.orientechnologies.orient.core.db.record;
-
-import com.orientechnologies.orient.core.record.impl.ORecordBytes;
-
-/**
- * Binary specialization of transactional database.
- *
- */
-public class ODatabaseBinary extends ODatabaseRecordTx {
-
- public ODatabaseBinary(String iURL) {
- super(iURL, ORecordBytes.RECORD_TYPE);
- }
-}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseFlat.java b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseFlat.java
deleted file mode 100755
index 09bc14dfc99..00000000000
--- a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseFlat.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- *
- * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
- * *
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * *
- * * For more information: http://www.orientechnologies.com
- *
- */
-package com.orientechnologies.orient.core.db.record;
-
-import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
-import com.orientechnologies.orient.core.iterator.ORecordIteratorCluster;
-import com.orientechnologies.orient.core.record.impl.ORecordFlat;
-
-/**
- * Delegates all the CRUD operations to the current transaction.
- *
- */
-public class ODatabaseFlat extends ODatabaseRecordTx {
-
- public ODatabaseFlat(String iURL) {
- super(iURL, ORecordFlat.RECORD_TYPE);
- serializer = ODatabaseDocumentTx.getDefaultSerializer();
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public ORecordIteratorCluster<ORecordFlat> browseCluster(final String iClusterName) {
- return super.browseCluster(iClusterName, ORecordFlat.class);
- }
-
- @Override
- public ORecordIteratorCluster<ORecordFlat> browseCluster(String iClusterName, long startClusterPosition,
- long endClusterPosition, boolean loadTombstones) {
- return super.browseCluster(iClusterName, ORecordFlat.class, startClusterPosition, endClusterPosition, loadTombstones);
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public ORecordFlat newInstance() {
- return new ORecordFlat();
- }
-}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/impl/ORecordFlat.java b/core/src/main/java/com/orientechnologies/orient/core/record/impl/ORecordFlat.java
index 6db8b2e689b..b4701b13e49 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/record/impl/ORecordFlat.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/record/impl/ORecordFlat.java
@@ -20,7 +20,7 @@
package com.orientechnologies.orient.core.record.impl;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
@@ -39,7 +39,7 @@ public class ORecordFlat extends ORecordAbstract implements ORecordStringable {
public static final byte RECORD_TYPE = 'f';
protected String value;
- public ORecordFlat(ODatabaseFlat iDatabase) {
+ public ORecordFlat(ODatabaseDocumentTx iDatabase) {
this();
ODatabaseRecordThreadLocal.INSTANCE.set(iDatabase);
}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDFlatPhysicalTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDFlatPhysicalTest.java
index 4bd522343d3..7e413faab08 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDFlatPhysicalTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDFlatPhysicalTest.java
@@ -18,18 +18,18 @@
import java.util.HashSet;
import java.util.Set;
-import com.orientechnologies.orient.core.storage.OStorage;
+import com.orientechnologies.orient.core.db.record.ODatabaseRecordAbstract;
+import com.orientechnologies.orient.core.iterator.ORecordIteratorCluster;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
@Test(groups = { "crud", "record-csv" }, sequential = true)
-public class CRUDFlatPhysicalTest extends FlatDBBaseTest {
+public class CRUDFlatPhysicalTest extends DocumentDBBaseTest {
private static final String CLUSTER_NAME = "binary";
protected static final int TOT_RECORDS = 100;
@@ -41,16 +41,16 @@ public CRUDFlatPhysicalTest(@Optional String url) {
super(url);
}
- @BeforeClass
- @Override
- public void beforeClass() throws Exception {
- super.beforeClass();
- record = database.newInstance();
- }
+ @BeforeClass
+ @Override
+ public void beforeClass() throws Exception {
+ super.beforeClass();
+ record = new ORecordFlat();
+ }
- public void createRaw() {
- if (database.getClusterIdByName(CLUSTER_NAME) < 0)
- database.addCluster(CLUSTER_NAME);
+ public void createRaw() {
+ if (database.getClusterIdByName(CLUSTER_NAME) < 0)
+ database.addCluster(CLUSTER_NAME);
startRecordNumber = database.countClusterElements(CLUSTER_NAME);
@@ -73,7 +73,8 @@ public void readRawWithExpressiveForwardIterator() {
for (int i = 0; i < TOT_RECORDS; i++)
ids.add(i);
- for (ORecordFlat rec : database.browseCluster(CLUSTER_NAME)) {
+ for (ORecordFlat rec : new ORecordIteratorCluster<ORecordFlat>(database, (ODatabaseRecordAbstract) database.getUnderlying(),
+ database.getClusterIdByName(CLUSTER_NAME), true)) {
fields = rec.value().split("-");
int i = Integer.parseInt(fields[0]);
@@ -87,7 +88,8 @@ public void readRawWithExpressiveForwardIterator() {
public void updateRaw() {
String[] fields;
- for (ORecordFlat rec : database.browseCluster(CLUSTER_NAME)) {
+ for (ORecordFlat rec : new ORecordIteratorCluster<ORecordFlat>(database, (ODatabaseRecordAbstract) database.getUnderlying(),
+ database.getClusterIdByName(CLUSTER_NAME), true)) {
fields = rec.value().split("-");
int i = Integer.parseInt(fields[0]);
if (i % 2 == 0) {
@@ -105,7 +107,8 @@ public void testUpdateRaw() {
for (int i = 0; i < TOT_RECORDS; i++)
ids.add(i);
- for (ORecordFlat rec : database.browseCluster(CLUSTER_NAME)) {
+ for (ORecordFlat rec : new ORecordIteratorCluster<ORecordFlat>(database, (ODatabaseRecordAbstract) database.getUnderlying(),
+ database.getClusterIdByName(CLUSTER_NAME), true)) {
fields = rec.value().split("-");
int i = Integer.parseInt(fields[0]);
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java
index a4c3c2ad1a4..54383db3c65 100755
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java
@@ -23,7 +23,6 @@
import org.testng.annotations.Test;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
@@ -40,49 +39,32 @@ public DictionaryTest(@Optional String url) {
}
public void testDictionaryCreate() throws IOException {
- ODatabaseFlat database = new ODatabaseFlat(url);
- database.open("admin", "admin");
- ORecordFlat record = database.newInstance();
+ ORecordFlat record = new ORecordFlat();
database.getDictionary().put("key1", record.value("Dictionary test!"));
-
- database.close();
}
@Test(dependsOnMethods = "testDictionaryCreate")
public void testDictionaryLookup() throws IOException {
- ODatabaseFlat database = new ODatabaseFlat(url);
- database.open("admin", "admin");
-
Assert.assertNotNull(database.getDictionary().get("key1"));
Assert.assertTrue(((ORecordFlat) database.getDictionary().get("key1")).value().equals("Dictionary test!"));
-
- database.close();
}
@Test(dependsOnMethods = "testDictionaryLookup")
public void testDictionaryUpdate() throws IOException {
- ODatabaseFlat database = new ODatabaseFlat(url);
- database.open("admin", "admin");
-
final long originalSize = database.getDictionary().size();
- database.getDictionary().put("key1", database.newInstance().value("Text changed"));
+ database.getDictionary().put("key1", new ORecordFlat().value("Text changed"));
database.close();
database.open("admin", "admin");
Assert.assertEquals(((ORecordFlat) database.getDictionary().get("key1")).value(), "Text changed");
Assert.assertEquals(database.getDictionary().size(), originalSize);
-
- database.close();
}
@Test(dependsOnMethods = "testDictionaryUpdate")
public void testDictionaryDelete() throws IOException {
- ODatabaseFlat database = new ODatabaseFlat(url);
- database.open("admin", "admin");
-
final long originalSize = database.getDictionary().size();
Assert.assertNotNull(database.getDictionary().remove("key1"));
@@ -90,22 +72,17 @@ public void testDictionaryDelete() throws IOException {
database.open("admin", "admin");
Assert.assertEquals(database.getDictionary().size(), originalSize - 1);
-
- database.close();
}
@Test(dependsOnMethods = "testDictionaryDelete")
public void testDictionaryMassiveCreate() throws IOException {
- ODatabaseFlat database = new ODatabaseFlat(url);
- database.open("admin", "admin");
-
final long originalSize = database.getDictionary().size();
// ASSURE TO STORE THE PAGE-SIZE + 3 FORCING THE CREATION OF LEFT AND RIGHT
final int total = 1000;
for (int i = total; i > 0; --i) {
- database.getDictionary().put("key-" + (originalSize + i), database.newInstance().value("test-dictionary-" + i));
+ database.getDictionary().put("key-" + (originalSize + i), new ORecordFlat().value("test-dictionary-" + i));
}
for (int i = total; i > 0; --i) {
@@ -114,22 +91,15 @@ public void testDictionaryMassiveCreate() throws IOException {
}
Assert.assertEquals(database.getDictionary().size(), originalSize + total);
-
- database.close();
}
@Test(dependsOnMethods = "testDictionaryMassiveCreate")
public void testDictionaryInTx() throws IOException {
- ODatabaseFlat database = new ODatabaseFlat(url);
- database.open("admin", "admin");
-
database.begin();
- database.getDictionary().put("tx-key", database.newInstance().value("tx-test-dictionary"));
+ database.getDictionary().put("tx-key", new ORecordFlat().value("tx-test-dictionary"));
database.commit();
Assert.assertNotNull(database.getDictionary().get("tx-key"));
-
- database.close();
}
public class ObjectDictionaryTest {
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/FlatDBBaseTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/FlatDBBaseTest.java
deleted file mode 100644
index fe45fa706d6..00000000000
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/FlatDBBaseTest.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package com.orientechnologies.orient.test.database.auto;
-
-import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
-import org.testng.annotations.Optional;
-import org.testng.annotations.Parameters;
-
-/**
- * @author Andrey Lomakin <a href="mailto:[email protected]">Andrey Lomakin</a>
- * @since 7/10/14
- */
-public abstract class FlatDBBaseTest extends BaseTest<ODatabaseFlat> {
- @Parameters(value = "url")
- protected FlatDBBaseTest(@Optional String url) {
- super(url);
- }
-
- @Override
- protected ODatabaseFlat createDatabaseInstance(String url) {
- return new ODatabaseFlat(url);
- }
-
- @Override
- protected void createDatabase() {
- ODatabaseDocumentTx db = new ODatabaseDocumentTx(database.getURL());
- db.create();
- db.close();
-
- database.open("admin", "admin");
- }
-}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java
index 391133364b1..e06eef77857 100755
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/TransactionAtomicTest.java
@@ -25,7 +25,6 @@
import com.orientechnologies.orient.core.db.ODatabase;
import com.orientechnologies.orient.core.db.ODatabaseListener;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
import com.orientechnologies.orient.core.exception.OConcurrentModificationException;
import com.orientechnologies.orient.core.exception.OTransactionException;
import com.orientechnologies.orient.core.metadata.schema.OClass;
@@ -45,13 +44,13 @@ public TransactionAtomicTest(@Optional String url) {
@Test
public void testTransactionAtomic() throws IOException {
- ODatabaseFlat db1 = new ODatabaseFlat(url);
+ ODatabaseDocumentTx db1 = new ODatabaseDocumentTx(url);
db1.open("admin", "admin");
- ODatabaseFlat db2 = new ODatabaseFlat(url);
+ ODatabaseDocumentTx db2 = new ODatabaseDocumentTx(url);
db2.open("admin", "admin");
- ORecordFlat record1 = new ORecordFlat(db1);
+ ORecordFlat record1 = new ORecordFlat();
record1.value("This is the first version").save();
// RE-READ THE RECORD
@@ -91,7 +90,7 @@ public void testMVCC() throws IOException {
@Test(expectedExceptions = OTransactionException.class)
public void testTransactionPreListenerRollback() throws IOException {
- ODatabaseFlat db = new ODatabaseFlat(url);
+ ODatabaseDocumentTx db = new ODatabaseDocumentTx(url);
db.open("admin", "admin");
ORecordFlat record1 = new ORecordFlat(db);
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/CreateRelationshipsSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/CreateRelationshipsSpeedTest.java
index 080197a4492..5c9b0944325 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/CreateRelationshipsSpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/CreateRelationshipsSpeedTest.java
@@ -19,12 +19,11 @@
import java.io.UnsupportedEncodingException;
import com.orientechnologies.common.test.SpeedTestMonoThread;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
-import com.orientechnologies.orient.core.storage.OStorage;
public class CreateRelationshipsSpeedTest extends SpeedTestMonoThread {
- private ODatabaseFlat database;
+ private ODatabaseDocumentTx database;
private ORecordFlat record;
public CreateRelationshipsSpeedTest() {
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupInverseSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupInverseSpeedTest.java
index 095137fffe6..a94eddda3a0 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupInverseSpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupInverseSpeedTest.java
@@ -17,17 +17,17 @@
import java.io.UnsupportedEncodingException;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.orientechnologies.orient.core.Orient;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
import com.orientechnologies.orient.test.database.base.OrientMonoThreadTest;
@Test(enabled = false)
public class DictionaryLookupInverseSpeedTest extends OrientMonoThreadTest {
- private ODatabaseFlat database;
+ private ODatabaseDocumentTx database;
public static void main(String[] iArgs) throws InstantiationException, IllegalAccessException {
DictionaryLookupInverseSpeedTest test = new DictionaryLookupInverseSpeedTest();
@@ -37,7 +37,7 @@ public static void main(String[] iArgs) throws InstantiationException, IllegalAc
public DictionaryLookupInverseSpeedTest() {
super(100000);
Orient.instance().getProfiler().startRecording();
- database = new ODatabaseFlat(System.getProperty("url")).open("admin", "admin");
+ database = new ODatabaseDocumentTx(System.getProperty("url")).open("admin", "admin");
}
@Override
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupSpeedTest.java
index 8a660404859..fd1b3dd6610 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupSpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryLookupSpeedTest.java
@@ -17,17 +17,17 @@
import java.io.UnsupportedEncodingException;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.orientechnologies.orient.core.Orient;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
import com.orientechnologies.orient.test.database.base.OrientMonoThreadTest;
@Test(enabled = false)
public class DictionaryLookupSpeedTest extends OrientMonoThreadTest {
- private ODatabaseFlat database;
+ private ODatabaseDocumentTx database;
public static void main(String[] iArgs) throws InstantiationException, IllegalAccessException {
DictionaryLookupSpeedTest test = new DictionaryLookupSpeedTest();
@@ -37,7 +37,7 @@ public static void main(String[] iArgs) throws InstantiationException, IllegalAc
public DictionaryLookupSpeedTest() {
super(100000);
Orient.instance().getProfiler().startRecording();
- database = new ODatabaseFlat(System.getProperty("url")).open("admin", "admin");
+ database = new ODatabaseDocumentTx(System.getProperty("url")).open("admin", "admin");
}
@Override
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryPutSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryPutSpeedTest.java
index ab7d510ef1e..684a385244a 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryPutSpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/DictionaryPutSpeedTest.java
@@ -15,10 +15,10 @@
*/
package com.orientechnologies.orient.test.database.speed;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import org.testng.annotations.Test;
import com.orientechnologies.orient.core.Orient;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
@@ -26,7 +26,7 @@
@Test(enabled = false)
public class DictionaryPutSpeedTest extends OrientMonoThreadTest {
- private ODatabaseFlat database;
+ private ODatabaseDocumentTx database;
private ORecordFlat record;
private long startNum;
@@ -40,10 +40,10 @@ public DictionaryPutSpeedTest() throws InstantiationException, IllegalAccessExce
super(1000000);
String url = System.getProperty("url");
- database = new ODatabaseFlat(url).open("admin", "admin");
+ database = new ODatabaseDocumentTx(url).open("admin", "admin");
database.declareIntent(new OIntentMassiveInsert());
- record = database.newInstance();
+ record = new ORecordFlat();
startNum = 0;// database.countClusterElements("Animal");
Orient.instance().getProfiler().startRecording();
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateBinarySpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateBinarySpeedTest.java
index 7c2c3de9567..711877b3895 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateBinarySpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateBinarySpeedTest.java
@@ -17,10 +17,10 @@
import java.util.Random;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import org.testng.annotations.Test;
import com.orientechnologies.orient.core.Orient;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
@@ -28,7 +28,7 @@
@Test(enabled = false)
public class LocalCreateBinarySpeedTest extends OrientMonoThreadTest {
- private ODatabaseFlat database;
+ private ODatabaseDocumentTx database;
private ORecordBytes record;
private final static int RECORD_SIZE = 512;
private byte[] recordContent;
@@ -46,7 +46,7 @@ public LocalCreateBinarySpeedTest() throws InstantiationException, IllegalAccess
public void init() {
Orient.instance().getProfiler().startRecording();
- database = new ODatabaseFlat(System.getProperty("url")).open("admin", "admin");
+ database = new ODatabaseDocumentTx(System.getProperty("url")).open("admin", "admin");
record = new ORecordBytes();
database.declareIntent(new OIntentMassiveInsert());
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatMultiThreadSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatMultiThreadSpeedTest.java
index 80196ce13b2..36d663b78f8 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatMultiThreadSpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatMultiThreadSpeedTest.java
@@ -15,11 +15,11 @@
*/
package com.orientechnologies.orient.test.database.speed;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.orientechnologies.common.test.SpeedTestMultiThreads;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
@@ -28,12 +28,12 @@
@Test(enabled = false)
public class LocalCreateFlatMultiThreadSpeedTest extends OrientMultiThreadTest {
- protected ODatabaseFlat database;
+ protected ODatabaseDocumentTx database;
private long foundObjects;
@Test(enabled = false)
public static class CreateObjectsThread extends OrientThreadTest {
- protected ODatabaseFlat database;
+ protected ODatabaseDocumentTx database;
protected ORecordFlat record;
public CreateObjectsThread(final SpeedTestMultiThreads parent, final int threadId) {
@@ -42,8 +42,8 @@ public CreateObjectsThread(final SpeedTestMultiThreads parent, final int threadI
@Override
public void init() {
- database = new ODatabaseFlat(System.getProperty("url")).open("admin", "admin");
- record = database.newInstance();
+ database = new ODatabaseDocumentTx(System.getProperty("url")).open("admin", "admin");
+ record = new ORecordFlat();
database.declareIntent(new OIntentMassiveInsert());
database.begin(TXTYPE.NOTX);
}
@@ -75,7 +75,7 @@ public static void main(String[] iArgs) throws InstantiationException, IllegalAc
@Override
public void init() {
- database = new ODatabaseFlat(System.getProperty("url")).open("admin", "admin");
+ database = new ODatabaseDocumentTx(System.getProperty("url")).open("admin", "admin");
foundObjects = database.countClusterElements("flat");
System.out.println("\nTotal objects in Animal cluster before the test: " + foundObjects);
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatSpeedTest.java
index 11efd77aefa..800a7eaa423 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatSpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateFlatSpeedTest.java
@@ -15,11 +15,11 @@
*/
package com.orientechnologies.orient.test.database.speed;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import org.testng.annotations.Test;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
@@ -27,7 +27,7 @@
@Test(enabled = false)
public class LocalCreateFlatSpeedTest extends OrientMonoThreadTest {
- private ODatabaseFlat database;
+ private ODatabaseDocumentTx database;
private ORecordFlat record;
private long date = System.currentTimeMillis();
@@ -45,13 +45,13 @@ public LocalCreateFlatSpeedTest() throws InstantiationException, IllegalAccessEx
public void init() {
Orient.instance().getProfiler().startRecording();
- database = new ODatabaseFlat(System.getProperty("url"));
+ database = new ODatabaseDocumentTx(System.getProperty("url"));
if (database.exists())
database.open("admin", "admin");
else
database.create();
- record = database.newInstance();
+ record = new ORecordFlat();
database.declareIntent(new OIntentMassiveInsert());
database.begin(TXTYPE.NOTX);
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateFlatSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateFlatSpeedTest.java
index 3d843ccf822..ff0cbd611ab 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateFlatSpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateFlatSpeedTest.java
@@ -15,16 +15,16 @@
*/
package com.orientechnologies.orient.test.database.speed;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import org.testng.annotations.Test;
import com.orientechnologies.orient.core.Orient;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
import com.orientechnologies.orient.test.database.base.OrientMonoThreadTest;
@Test(enabled = false)
public class TxRemoteCreateFlatSpeedTest extends OrientMonoThreadTest {
- private ODatabaseFlat database;
+ private ODatabaseDocumentTx database;
private ORecordFlat record;
public static void main(String[] iArgs) throws InstantiationException, IllegalAccessException {
@@ -40,8 +40,8 @@ public TxRemoteCreateFlatSpeedTest() throws InstantiationException, IllegalAcces
public void init() {
Orient.instance().getProfiler().startRecording();
- database = new ODatabaseFlat(System.getProperty("url")).open("admin", "admin");
- record = database.newInstance();
+ database = new ODatabaseDocumentTx(System.getProperty("url")).open("admin", "admin");
+ record = new ORecordFlat();
database.begin();
}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateObjectsMultiThreadSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateObjectsMultiThreadSpeedTest.java
index a041b4f5281..3d3c316871c 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateObjectsMultiThreadSpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxRemoteCreateObjectsMultiThreadSpeedTest.java
@@ -16,20 +16,19 @@
package com.orientechnologies.orient.test.database.speed;
import com.orientechnologies.common.test.SpeedTestMultiThreads;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
-import com.orientechnologies.orient.core.storage.OStorage;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
import com.orientechnologies.orient.test.database.base.OrientMultiThreadTest;
import com.orientechnologies.orient.test.database.base.OrientThreadTest;
public class TxRemoteCreateObjectsMultiThreadSpeedTest extends OrientMultiThreadTest {
- protected ODatabaseFlat database;
- protected long foundObjects;
+ protected ODatabaseDocumentTx database;
+ protected long foundObjects;
public static class CreateObjectsThread extends OrientThreadTest {
- protected ODatabaseFlat database;
- protected ORecordFlat record = new ORecordFlat();
+ protected ODatabaseDocumentTx database;
+ protected ORecordFlat record = new ORecordFlat();
public CreateObjectsThread(final SpeedTestMultiThreads parent, final int threadId) {
super(parent, threadId);
@@ -37,8 +36,8 @@ public CreateObjectsThread(final SpeedTestMultiThreads parent, final int threadI
@Override
public void init() {
- database = new ODatabaseFlat(System.getProperty("url")).open("admin", "admin");
- record = database.newInstance();
+ database = new ODatabaseDocumentTx(System.getProperty("url")).open("admin", "admin");
+ record = new ORecordFlat();
database.begin(TXTYPE.NOTX);
}
@@ -69,7 +68,7 @@ public static void main(String[] iArgs) throws InstantiationException, IllegalAc
@Override
public void init() {
- database = new ODatabaseFlat(System.getProperty("url")).open("admin", "admin");
+ database = new ODatabaseDocumentTx(System.getProperty("url")).open("admin", "admin");
if (!database.getStorage().getClusterNames().contains("Animal"))
database.addCluster("Animal");
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxTest.java
index 151dad1d413..1a5ec437302 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/TxTest.java
@@ -17,22 +17,22 @@
import java.io.UnsupportedEncodingException;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import org.testng.annotations.Test;
-import com.orientechnologies.orient.core.db.record.ODatabaseFlat;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE;
import com.orientechnologies.orient.test.database.base.OrientMonoThreadTest;
@Test(enabled = false)
public class TxTest extends OrientMonoThreadTest {
- private ODatabaseFlat database;
+ private ODatabaseDocumentTx database;
private ORecordFlat record;
public TxTest() throws InstantiationException, IllegalAccessException {
super(10);
- database = new ODatabaseFlat(System.getProperty("url")).open("admin", "admin");
- record = database.newInstance();
+ database = new ODatabaseDocumentTx(System.getProperty("url")).open("admin", "admin");
+ record = new ORecordFlat();
database.begin(TXTYPE.OPTIMISTIC);
}
|
d9d3f68a390494af15e49060b7afd9924dbcbee1
|
kotlin
|
Drop JetClassObject element and its usages--as class objects are now represented by JetObjectDeclaration element-
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/ClassBodyCodegen.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/ClassBodyCodegen.java
index fcc1c54048206..1fd5dec89b76f 100644
--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/ClassBodyCodegen.java
+++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/ClassBodyCodegen.java
@@ -99,9 +99,6 @@ else if (declaration instanceof JetClassOrObject) {
genClassOrObject((JetClassOrObject) declaration);
}
- else if (declaration instanceof JetClassObject) {
- genClassOrObject(((JetClassObject) declaration).getObjectDeclaration());
- }
}
private void generatePrimaryConstructorProperties(PropertyCodegen propertyCodegen, JetClassOrObject origin) {
diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java
index 6b89164c5665d..3a11a2f98a8bb 100644
--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java
+++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java
@@ -150,7 +150,7 @@ else if (jetClass.isEnum()) {
isStatic = !jetClass.isInner();
}
else {
- isStatic = myClass.getParent() instanceof JetClassObject;
+ isStatic = myClass instanceof JetObjectDeclaration && ((JetObjectDeclaration) myClass).isClassObject() ;
isFinal = true;
}
@@ -968,9 +968,9 @@ private void generateFieldForSingleton() {
fieldTypeDescriptor = descriptor;
}
else if (classObjectDescriptor != null) {
- JetClassObject classObject = ((JetClass) myClass).getClassObject();
+ JetObjectDeclaration classObject = ((JetClass) myClass).getClassObject();
assert classObject != null : "Class object not found: " + myClass.getText();
- original = classObject.getObjectDeclaration();
+ original = classObject;
fieldTypeDescriptor = classObjectDescriptor;
}
else {
diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/binding/CodegenAnnotatingVisitor.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/binding/CodegenAnnotatingVisitor.java
index e6a744a51d702..4f0990a4c578c 100644
--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/binding/CodegenAnnotatingVisitor.java
+++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/binding/CodegenAnnotatingVisitor.java
@@ -207,46 +207,23 @@ public void visitEnumEntry(@NotNull JetEnumEntry enumEntry) {
}
@Override
- public void visitClassObject(@NotNull JetClassObject classObject) {
- ClassDescriptor classDescriptor = bindingContext.get(CLASS, classObject.getObjectDeclaration());
+ public void visitObjectDeclaration(@NotNull JetObjectDeclaration declaration) {
+ if (!filter.shouldProcessClass(declaration)) return;
- assert classDescriptor != null : String.format("No class found in binding context for: \n---\n%s\n---\n",
- JetPsiUtil.getElementTextWithContext(classObject));
+ ClassDescriptor classDescriptor = bindingContext.get(CLASS, declaration);
+ // working around a problem with shallow analysis
+ if (classDescriptor == null) return;
- //TODO_R: remove visitClassObject
String name = getName(classDescriptor);
recordClosure(classDescriptor, name);
classStack.push(classDescriptor);
nameStack.push(name);
- super.visitClassObject(classObject);
+ super.visitObjectDeclaration(declaration);
nameStack.pop();
classStack.pop();
}
- @Override
- public void visitObjectDeclaration(@NotNull JetObjectDeclaration declaration) {
- if (declaration.getParent() instanceof JetClassObject) {
- super.visitObjectDeclaration(declaration);
- }
- else {
- if (!filter.shouldProcessClass(declaration)) return;
-
- ClassDescriptor classDescriptor = bindingContext.get(CLASS, declaration);
- // working around a problem with shallow analysis
- if (classDescriptor == null) return;
-
- String name = getName(classDescriptor);
- recordClosure(classDescriptor, name);
-
- classStack.push(classDescriptor);
- nameStack.push(name);
- super.visitObjectDeclaration(declaration);
- nameStack.pop();
- classStack.pop();
- }
- }
-
@Override
public void visitClass(@NotNull JetClass klass) {
if (!filter.shouldProcessClass(klass)) return;
diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/binding/PsiCodegenPredictor.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/binding/PsiCodegenPredictor.java
index e06ac2f4360b1..de0267dcbb49d 100644
--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/binding/PsiCodegenPredictor.java
+++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/binding/PsiCodegenPredictor.java
@@ -55,10 +55,6 @@ public static String getPredefinedJvmInternalName(@NotNull JetDeclaration declar
// TODO: Method won't give correct class name for traits implementations
JetDeclaration parentDeclaration = JetStubbedPsiUtil.getContainingDeclaration(declaration);
- if (parentDeclaration instanceof JetClassObject) {
- assert declaration instanceof JetObjectDeclaration : "Only object declarations can be children of JetClassObject: " + declaration;
- return getPredefinedJvmInternalName(parentDeclaration);
- }
String parentInternalName;
if (parentDeclaration != null) {
@@ -78,12 +74,6 @@ public static String getPredefinedJvmInternalName(@NotNull JetDeclaration declar
parentInternalName = AsmUtil.internalNameByFqNameWithoutInnerClasses(containingFile.getPackageFqName());
}
- if (declaration instanceof JetClassObject) {
- // Get parent and assign Class object prefix
- //TODO_R: getName() nullable
- return parentInternalName + "$" + ((JetClassObject) declaration).getObjectDeclaration().getName();
- }
-
if (!PsiTreeUtil.instanceOf(declaration, JetClass.class, JetObjectDeclaration.class, JetNamedFunction.class, JetProperty.class) ||
declaration instanceof JetEnumEntry) {
// Other subclasses are not valid for class name prediction.
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/JetNodeTypes.java b/compiler/frontend/src/org/jetbrains/kotlin/JetNodeTypes.java
index 7f730d0288ff0..3dc47f9843b55 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/JetNodeTypes.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/JetNodeTypes.java
@@ -35,7 +35,6 @@ public interface JetNodeTypes {
IElementType OBJECT_DECLARATION = JetStubElementTypes.OBJECT_DECLARATION;
JetNodeType OBJECT_DECLARATION_NAME = new JetNodeType("OBJECT_DECLARATION_NAME", JetObjectDeclarationName.class);
- IElementType CLASS_OBJECT = JetStubElementTypes.CLASS_OBJECT;
IElementType ENUM_ENTRY = JetStubElementTypes.ENUM_ENTRY;
IElementType ANONYMOUS_INITIALIZER = JetStubElementTypes.ANONYMOUS_INITIALIZER;
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/Errors.java b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/Errors.java
index 5ddce51d76c5c..b709ac255dd57 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/Errors.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/Errors.java
@@ -187,8 +187,8 @@ public interface Errors {
// Class objects
- DiagnosticFactory0<JetClassObject> MANY_CLASS_OBJECTS = DiagnosticFactory0.create(ERROR);
- DiagnosticFactory0<JetClassObject> CLASS_OBJECT_NOT_ALLOWED = DiagnosticFactory0.create(ERROR);
+ DiagnosticFactory0<JetObjectDeclaration> MANY_CLASS_OBJECTS = DiagnosticFactory0.create(ERROR);
+ DiagnosticFactory0<JetObjectDeclaration> CLASS_OBJECT_NOT_ALLOWED = DiagnosticFactory0.create(ERROR);
// Objects
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/PositioningStrategies.kt b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/PositioningStrategies.kt
index 9d5ed884af233..6e7aedb33e7a6 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/PositioningStrategies.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/PositioningStrategies.kt
@@ -51,10 +51,11 @@ public object PositioningStrategies {
}
return markRange(objectKeyword, delegationSpecifierList)
}
- is JetClassObject -> {
- val classKeyword = element.getClassKeyword()
- val objectKeyword = element.getObjectDeclaration().getObjectKeyword()
- return markRange(classKeyword, objectKeyword)
+ is JetObjectDeclaration -> {
+ return markRange(
+ element.getClassKeyword() ?: element.getObjectKeyword(),
+ element.getNameIdentifier() ?: element.getObjectKeyword()
+ )
}
else -> {
return super.mark(element)
@@ -99,17 +100,7 @@ public object PositioningStrategies {
}
return markElement(nameIdentifier)
}
- if (element is JetObjectDeclaration) {
- val objectKeyword = element.getObjectKeyword()
- val parent = element.getParent()
- if (parent is JetClassObject) {
- val classKeyword = parent.getClassKeyword()
- val start = classKeyword ?: objectKeyword
- return markRange(start, objectKeyword)
- }
- return markElement(objectKeyword)
- }
- return super.mark(element)
+ return DEFAULT.mark(element)
}
}
@@ -240,7 +231,6 @@ public object PositioningStrategies {
is JetObjectDeclaration -> element.getObjectKeyword()
is JetPropertyAccessor -> element.getNamePlaceholder()
is JetClassInitializer -> element
- is JetClassObject -> element.getObjectDeclaration().getObjectKeyword()
else -> throw IllegalArgumentException(
"Can't find text range for element '${element.javaClass.getCanonicalName()}' with the text '${element.getText()}'")
}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/DebugTextUtil.kt b/compiler/frontend/src/org/jetbrains/kotlin/psi/DebugTextUtil.kt
index 7048fec93d6f1..41d76c6ac573f 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/DebugTextUtil.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/psi/DebugTextUtil.kt
@@ -188,11 +188,6 @@ private object DebugTextBuildingVisitor : JetVisitor<String, Unit>() {
return "initializer in " + (containingDeclaration?.getDebugText() ?: "...")
}
- override fun visitClassObject(classObject: JetClassObject, data: Unit?): String? {
- val containingDeclaration = JetStubbedPsiUtil.getContainingDeclaration(classObject)
- return "class object in " + (containingDeclaration?.getDebugText() ?: "...")
- }
-
override fun visitClassBody(classBody: JetClassBody, data: Unit?): String? {
val containingDeclaration = JetStubbedPsiUtil.getContainingDeclaration(classBody)
return "class body for " + (containingDeclaration?.getDebugText() ?: "...")
@@ -242,6 +237,9 @@ private object DebugTextBuildingVisitor : JetVisitor<String, Unit>() {
return buildText {
append("STUB: ")
appendInn(declaration.getModifierList(), suffix = " ")
+ if (declaration.isClassObject()) {
+ append("class ")
+ }
append("object ")
appendInn(declaration.getNameAsName())
appendInn(declaration.getDelegationSpecifierList(), prefix = " : ")
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetClass.java b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetClass.java
index b20663c7f4a78..f39b07909eb9c 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetClass.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetClass.java
@@ -142,7 +142,7 @@ public JetClassBody getBody() {
}
@Nullable
- public JetClassObject getClassObject() {
+ public JetObjectDeclaration getClassObject() {
JetClassBody body = getBody();
if (body == null) return null;
return body.getClassObject();
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetClassBody.java b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetClassBody.java
index 759a165ce24b8..07f8a0767c695 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetClassBody.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetClassBody.java
@@ -16,6 +16,7 @@
package org.jetbrains.kotlin.psi;
+import com.google.common.collect.Lists;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiElement;
import com.intellij.psi.tree.TokenSet;
@@ -31,6 +32,7 @@
import java.util.Arrays;
import java.util.List;
+import static kotlin.KotlinPackage.firstOrNull;
import static org.jetbrains.kotlin.psi.stubs.elements.JetStubElementTypes.*;
public class JetClassBody extends JetElementImplStub<KotlinPlaceHolderStub<JetClassBody>> implements JetDeclarationContainer {
@@ -65,13 +67,19 @@ public List<JetProperty> getProperties() {
}
@Nullable
- public JetClassObject getClassObject() {
- return getStubOrPsiChild(CLASS_OBJECT);
+ public JetObjectDeclaration getClassObject() {
+ return firstOrNull(getAllClassObjects());
}
@NotNull
- public List<JetClassObject> getAllClassObjects() {
- return getStubOrPsiChildrenAsList(JetStubElementTypes.CLASS_OBJECT);
+ public List<JetObjectDeclaration> getAllClassObjects() {
+ List<JetObjectDeclaration> result = Lists.newArrayList();
+ for (JetObjectDeclaration declaration : getStubOrPsiChildrenAsList(JetStubElementTypes.OBJECT_DECLARATION)) {
+ if (declaration.isClassObject()) {
+ result.add(declaration);
+ }
+ }
+ return result;
}
@Nullable
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetClassObject.java b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetClassObject.java
deleted file mode 100644
index 3156e5f8c703a..0000000000000
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetClassObject.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright 2010-2015 JetBrains s.r.o.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.jetbrains.kotlin.psi;
-
-import com.intellij.lang.ASTNode;
-import com.intellij.psi.PsiElement;
-import org.jetbrains.annotations.NotNull;
-import org.jetbrains.kotlin.lexer.JetTokens;
-import org.jetbrains.kotlin.psi.stubs.KotlinPlaceHolderStub;
-import org.jetbrains.kotlin.psi.stubs.elements.JetStubElementTypes;
-
-public class JetClassObject extends JetDeclarationStub<KotlinPlaceHolderStub<JetClassObject>> implements JetStatementExpression {
- public JetClassObject(@NotNull ASTNode node) {
- super(node);
- }
-
- public JetClassObject(@NotNull KotlinPlaceHolderStub<JetClassObject> stub) {
- super(stub, JetStubElementTypes.CLASS_OBJECT);
- }
-
- @Override
- public <R, D> R accept(@NotNull JetVisitor<R, D> visitor, D data) {
- return visitor.visitClassObject(this, data);
- }
-
- @NotNull
- public JetObjectDeclaration getObjectDeclaration() {
- return getRequiredStubOrPsiChild(JetStubElementTypes.OBJECT_DECLARATION);
- }
-
- @NotNull
- public PsiElement getClassKeyword() {
- return findChildByType(JetTokens.CLASS_KEYWORD);
- }
-}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetNamedDeclarationUtil.java b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetNamedDeclarationUtil.java
index cdc4e726f062e..afa4d338a2f13 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetNamedDeclarationUtil.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetNamedDeclarationUtil.java
@@ -51,9 +51,6 @@ public static FqNameUnsafe getUnsafeFQName(@NotNull JetNamedDeclaration namedDec
@Nullable
public static FqName getParentFqName(@NotNull JetNamedDeclaration namedDeclaration) {
PsiElement parent = namedDeclaration.getParent();
- if (parent instanceof JetClassObject) {
- parent = parent.getParent();
- }
if (parent instanceof JetClassBody) {
// One nesting to JetClassBody doesn't affect to qualified name
parent = parent.getParent();
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetPsiFactory.kt b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetPsiFactory.kt
index 618d439861e64..2747248114292 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetPsiFactory.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetPsiFactory.kt
@@ -645,10 +645,6 @@ public class JetPsiFactory(private val project: Project) {
return createFunction("fun foo() {\n" + bodyText + "\n}").getBodyExpression() as JetBlockExpression
}
- public fun createEmptyClassObject(): JetClassObject {
- return createClass("class foo { class object { } }").getClassObject()!!
- }
-
public fun createComment(text: String): PsiComment {
val file = createFile(text)
val comments = file.getChildren().filterIsInstance<PsiComment>()
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetPsiUtil.java b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetPsiUtil.java
index 24af2ab33c7cb..85deec9989271 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetPsiUtil.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetPsiUtil.java
@@ -431,11 +431,6 @@ public static JetClassOrObject getOutermostClassOrObject(@NotNull JetClassOrObje
if (parent instanceof PsiFile) {
return current;
}
- if (parent instanceof JetClassObject) {
- // current class IS the class object declaration
- parent = parent.getParent();
- assert parent instanceof JetClassBody : "Parent of class object is not a class body: " + parent;
- }
if (!(parent instanceof JetClassBody)) {
// It is a local class, no legitimate outer
return current;
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitor.java b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitor.java
index 9d387a55a3651..79cf79de17d28 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitor.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitor.java
@@ -33,10 +33,6 @@ public R visitClass(@NotNull JetClass klass, D data) {
return visitNamedDeclaration(klass, data);
}
- public R visitClassObject(@NotNull JetClassObject classObject, D data) {
- return visitDeclaration(classObject, data);
- }
-
public R visitNamedFunction(@NotNull JetNamedFunction function, D data) {
return visitNamedDeclaration(function, data);
}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitorVoid.java b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitorVoid.java
index 5a31d91f91c91..289cfd2370d24 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitorVoid.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitorVoid.java
@@ -33,10 +33,6 @@ public void visitClass(@NotNull JetClass klass) {
super.visitClass(klass, null);
}
- public void visitClassObject(@NotNull JetClassObject classObject) {
- super.visitClassObject(classObject, null);
- }
-
public void visitNamedFunction(@NotNull JetNamedFunction function) {
super.visitNamedFunction(function, null);
}
@@ -436,12 +432,6 @@ public final Void visitClass(@NotNull JetClass klass, Void data) {
return null;
}
- @Override
- public final Void visitClassObject(@NotNull JetClassObject classObject, Void data) {
- visitClassObject(classObject);
- return null;
- }
-
@Override
public final Void visitNamedFunction(@NotNull JetNamedFunction function, Void data) {
visitNamedFunction(function);
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitorVoidWithParameter.java b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitorVoidWithParameter.java
index 4011f0a8fdcf3..bc086241de6a4 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitorVoidWithParameter.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/psi/JetVisitorVoidWithParameter.java
@@ -34,10 +34,6 @@ public void visitClassVoid(@NotNull JetClass klass, P data) {
super.visitClass(klass, data);
}
- public void visitClassObjectVoid(@NotNull JetClassObject classObject, P data) {
- super.visitClassObject(classObject, data);
- }
-
public void visitNamedFunctionVoid(@NotNull JetNamedFunction function, P data) {
super.visitNamedFunction(function, data);
}
@@ -433,12 +429,6 @@ public final Void visitClass(@NotNull JetClass klass, P data) {
return null;
}
- @Override
- public final Void visitClassObject(@NotNull JetClassObject classObject, P data) {
- visitClassObjectVoid(classObject, data);
- return null;
- }
-
@Override
public final Void visitNamedFunction(@NotNull JetNamedFunction function, P data) {
visitNamedFunctionVoid(function, data);
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/stubs/elements/JetObjectElementType.java b/compiler/frontend/src/org/jetbrains/kotlin/psi/stubs/elements/JetObjectElementType.java
index 144f5499baa9c..82cd27d33a46a 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/stubs/elements/JetObjectElementType.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/psi/stubs/elements/JetObjectElementType.java
@@ -24,7 +24,6 @@
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.name.FqName;
-import org.jetbrains.kotlin.psi.JetClassObject;
import org.jetbrains.kotlin.psi.JetObjectDeclaration;
import org.jetbrains.kotlin.psi.psiUtil.PsiUtilPackage;
import org.jetbrains.kotlin.psi.stubs.KotlinObjectStub;
@@ -46,7 +45,7 @@ public KotlinObjectStub createStub(@NotNull JetObjectDeclaration psi, StubElemen
FqName fqName = ResolveSessionUtils.safeFqNameForLazyResolve(psi);
List<String> superNames = PsiUtilPackage.getSuperNames(psi);
return new KotlinObjectStubImpl(parentStub, StringRef.fromString(name), fqName, Utils.INSTANCE$.wrapStrings(superNames),
- psi.isTopLevel(), isClassObject(psi), psi.isLocal(), psi.isObjectLiteral());
+ psi.isTopLevel(), psi.isClassObject(), psi.isLocal(), psi.isObjectLiteral());
}
@Override
@@ -93,8 +92,4 @@ public KotlinObjectStub deserialize(@NotNull StubInputStream dataStream, StubEle
public void indexStub(@NotNull KotlinObjectStub stub, @NotNull IndexSink sink) {
StubIndexServiceFactory.getInstance().indexObject(stub, sink);
}
-
- private static boolean isClassObject(@NotNull JetObjectDeclaration objectDeclaration) {
- return objectDeclaration.getParent() instanceof JetClassObject;
- }
}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/psi/stubs/elements/JetStubElementTypes.java b/compiler/frontend/src/org/jetbrains/kotlin/psi/stubs/elements/JetStubElementTypes.java
index 8bed79d669d33..eba0047c572a7 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/psi/stubs/elements/JetStubElementTypes.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/psi/stubs/elements/JetStubElementTypes.java
@@ -31,8 +31,6 @@ public interface JetStubElementTypes {
JetClassElementType ENUM_ENTRY = new JetClassElementType("ENUM_ENTRY");
JetObjectElementType OBJECT_DECLARATION = new JetObjectElementType("OBJECT_DECLARATION");
- JetPlaceHolderStubElementType<JetClassObject> CLASS_OBJECT =
- new JetPlaceHolderStubElementType<JetClassObject>("CLASS_OBJECT", JetClassObject.class);
JetPlaceHolderStubElementType<JetClassInitializer> ANONYMOUS_INITIALIZER =
new JetPlaceHolderStubElementType<JetClassInitializer>("ANONYMOUS_INITIALIZER", JetClassInitializer.class);
@@ -115,7 +113,7 @@ public interface JetStubElementTypes {
new JetPlaceHolderStubElementType<JetConstructorCalleeExpression>("CONSTRUCTOR_CALLEE", JetConstructorCalleeExpression.class);
TokenSet DECLARATION_TYPES =
- TokenSet.create(CLASS, OBJECT_DECLARATION, CLASS_OBJECT, FUNCTION, PROPERTY, ANONYMOUS_INITIALIZER, ENUM_ENTRY);
+ TokenSet.create(CLASS, OBJECT_DECLARATION, FUNCTION, PROPERTY, ANONYMOUS_INITIALIZER, ENUM_ENTRY);
TokenSet DELEGATION_SPECIFIER_TYPES = TokenSet.create(DELEGATOR_BY, DELEGATOR_SUPER_CALL, DELEGATOR_SUPER_CLASS, THIS_CALL);
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/LazyTopDownAnalyzer.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/LazyTopDownAnalyzer.java
index 174de2faaa9a8..693f292281975 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/LazyTopDownAnalyzer.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/LazyTopDownAnalyzer.java
@@ -199,9 +199,9 @@ private void checkManyClassObjects(JetClassOrObject classOrObject) {
for (JetDeclaration jetDeclaration : classOrObject.getDeclarations()) {
jetDeclaration.accept(this);
- if (jetDeclaration instanceof JetClassObject) {
+ if (jetDeclaration instanceof JetObjectDeclaration && ((JetObjectDeclaration) jetDeclaration).isClassObject()) {
if (classObjectAlreadyFound) {
- trace.report(MANY_CLASS_OBJECTS.on((JetClassObject) jetDeclaration));
+ trace.report(MANY_CLASS_OBJECTS.on((JetObjectDeclaration) jetDeclaration));
}
classObjectAlreadyFound = true;
}
@@ -226,11 +226,6 @@ private void registerPrimaryConstructorParameters(@NotNull JetClass klass) {
}
}
- @Override
- public void visitClassObject(@NotNull JetClassObject classObject) {
- visitClassOrObject(classObject.getObjectDeclaration());
- }
-
@Override
public void visitEnumEntry(@NotNull JetEnumEntry enumEntry) {
visitClassOrObject(enumEntry);
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/ModifiersChecker.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/ModifiersChecker.java
index c696b2d377983..abc640b8fa1a6 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/ModifiersChecker.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/ModifiersChecker.java
@@ -171,7 +171,7 @@ private void checkModalityModifiers(@NotNull JetModifierListOwner modifierListOw
checkCompatibility(modifierList, Arrays.asList(ABSTRACT_KEYWORD, OPEN_KEYWORD, FINAL_KEYWORD),
Arrays.asList(ABSTRACT_KEYWORD, OPEN_KEYWORD));
- if (modifierListOwner.getParent() instanceof JetClassObject || modifierListOwner instanceof JetObjectDeclaration) {
+ if (modifierListOwner instanceof JetObjectDeclaration) {
checkIllegalModalityModifiers(modifierListOwner);
}
else if (modifierListOwner instanceof JetClassOrObject) {
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/DeclarationScopeProviderImpl.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/DeclarationScopeProviderImpl.java
index abe23c0359807..ae18ed7ded8e5 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/DeclarationScopeProviderImpl.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/DeclarationScopeProviderImpl.java
@@ -68,16 +68,6 @@ public JetScope getResolutionScopeForDeclaration(@NotNull PsiElement elementOfDe
return classDescriptor.getScopeForMemberDeclarationResolution();
}
- if (parentDeclaration instanceof JetClassObject) {
- assert jetDeclaration instanceof JetObjectDeclaration : "Should be situation for getting scope for object in class [object {...}]";
-
- JetClassObject classObject = (JetClassObject) parentDeclaration;
- LazyClassDescriptor classObjectDescriptor =
- (LazyClassDescriptor) lazyDeclarationResolver.getClassObjectDescriptor(classObject).getContainingDeclaration();
-
- return classObjectDescriptor.getScopeForMemberDeclarationResolution();
- }
-
throw new IllegalStateException("Don't call this method for local declarations: " + jetDeclaration + "\n" +
JetPsiUtil.getElementTextWithContext(jetDeclaration));
}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/LazyDeclarationResolver.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/LazyDeclarationResolver.java
index c35a346e0cfb1..ea126f9accc6d 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/LazyDeclarationResolver.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/LazyDeclarationResolver.java
@@ -27,7 +27,6 @@
import org.jetbrains.kotlin.renderer.DescriptorRenderer;
import org.jetbrains.kotlin.resolve.BindingContext;
import org.jetbrains.kotlin.resolve.BindingTrace;
-import org.jetbrains.kotlin.resolve.lazy.descriptors.LazyClassDescriptor;
import org.jetbrains.kotlin.resolve.lazy.descriptors.LazyPackageDescriptor;
import org.jetbrains.kotlin.resolve.scopes.JetScope;
import org.jetbrains.kotlin.storage.LockBasedLazyResolveStorageManager;
@@ -65,13 +64,6 @@ public LazyDeclarationResolver(
@NotNull
public ClassDescriptor getClassDescriptor(@NotNull JetClassOrObject classOrObject) {
- if (classOrObject instanceof JetObjectDeclaration) {
- JetObjectDeclaration objectDeclaration = (JetObjectDeclaration) classOrObject;
- JetClassObject classObjectElement = objectDeclaration.getClassObjectElement();
- if (classObjectElement != null) {
- return getClassObjectDescriptor(classObjectElement);
- }
- }
JetScope resolutionScope = resolutionScopeToResolveDeclaration(classOrObject);
// Why not use the result here. Because it may be that there is a redeclaration:
@@ -93,31 +85,6 @@ public ClassDescriptor getClassDescriptor(@NotNull JetClassOrObject classOrObjec
return (ClassDescriptor) descriptor;
}
- @NotNull
- /*package*/ LazyClassDescriptor getClassObjectDescriptor(@NotNull JetClassObject classObject) {
- JetClass aClass = JetStubbedPsiUtil.getContainingDeclaration(classObject, JetClass.class);
-
- LazyClassDescriptor parentClassDescriptor;
-
- if (aClass != null) {
- parentClassDescriptor = (LazyClassDescriptor) getClassDescriptor(aClass);
- }
- else {
- // Class object in object is an error but we want to find descriptors even for this case
- JetObjectDeclaration objectDeclaration = PsiTreeUtil.getParentOfType(classObject, JetObjectDeclaration.class);
- assert objectDeclaration != null : String.format("Class object %s can be in class or object in file %s", classObject, classObject.getContainingFile().getText());
- parentClassDescriptor = (LazyClassDescriptor) getClassDescriptor(objectDeclaration);
- }
-
- // Activate resolution and writing to trace
- parentClassDescriptor.getClassObjectDescriptor();
- parentClassDescriptor.getDescriptorsForExtraClassObjects();
- DeclarationDescriptor classObjectDescriptor = getBindingContext().get(BindingContext.DECLARATION_TO_DESCRIPTOR, classObject.getObjectDeclaration());
- assert classObjectDescriptor != null : "No descriptor found for " + JetPsiUtil.getElementTextWithContext(classObject);
-
- return (LazyClassDescriptor) classObjectDescriptor;
- }
-
@NotNull
private BindingContext getBindingContext() {
return trace.getBindingContext();
@@ -133,19 +100,9 @@ public DeclarationDescriptor visitClass(@NotNull JetClass klass, Void data) {
@Override
public DeclarationDescriptor visitObjectDeclaration(@NotNull JetObjectDeclaration declaration, Void data) {
- PsiElement parent = declaration.getParent();
- if (parent instanceof JetClassObject) {
- JetClassObject jetClassObject = (JetClassObject) parent;
- return resolveToDescriptor(jetClassObject);
- }
return getClassDescriptor(declaration);
}
- @Override
- public DeclarationDescriptor visitClassObject(@NotNull JetClassObject classObject, Void data) {
- return getClassObjectDescriptor(classObject);
- }
-
@Override
public DeclarationDescriptor visitTypeParameter(@NotNull JetTypeParameter parameter, Void data) {
JetTypeParameterListOwner ownerElement = PsiTreeUtil.getParentOfType(parameter, JetTypeParameterListOwner.class);
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassInfo.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassInfo.java
index bbbc86c5d656e..a4028be51417f 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassInfo.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassInfo.java
@@ -46,7 +46,7 @@ else if (element.isEnum()) {
}
@Override
- public JetClassObject getClassObject() {
+ public JetObjectDeclaration getClassObject() {
return element.getClassObject();
}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassLikeInfo.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassLikeInfo.java
index 71cc9d4d068c4..9c10c34b58df5 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassLikeInfo.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassLikeInfo.java
@@ -21,8 +21,8 @@
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.ReadOnly;
import org.jetbrains.kotlin.descriptors.ClassKind;
-import org.jetbrains.kotlin.psi.*;
import org.jetbrains.kotlin.name.FqName;
+import org.jetbrains.kotlin.psi.*;
import java.util.List;
@@ -34,11 +34,11 @@ public interface JetClassLikeInfo extends JetDeclarationContainer {
JetModifierList getModifierList();
@Nullable
- JetClassObject getClassObject();
+ JetObjectDeclaration getClassObject();
@NotNull
@ReadOnly
- List<JetClassObject> getClassObjects();
+ List<JetObjectDeclaration> getClassObjects();
// This element is used to identify resolution scope for the class
@NotNull
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassOrObjectInfo.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassOrObjectInfo.java
index 2001283c1c79a..93e12041d6b0b 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassOrObjectInfo.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetClassOrObjectInfo.java
@@ -58,7 +58,7 @@ public List<JetDeclaration> getDeclarations() {
@NotNull
@Override
- public List<JetClassObject> getClassObjects() {
+ public List<JetObjectDeclaration> getClassObjects() {
JetClassBody body = element.getBody();
if (body == null) {
return Collections.emptyList();
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetObjectInfo.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetObjectInfo.java
index d68b8f57abefd..60266d5046e5b 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetObjectInfo.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetObjectInfo.java
@@ -19,7 +19,6 @@
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.kotlin.descriptors.ClassKind;
-import org.jetbrains.kotlin.psi.JetClassObject;
import org.jetbrains.kotlin.psi.JetObjectDeclaration;
import org.jetbrains.kotlin.psi.JetParameter;
import org.jetbrains.kotlin.psi.JetTypeParameterList;
@@ -39,7 +38,7 @@ protected JetObjectInfo(@NotNull JetObjectDeclaration element) {
}
@Override
- public JetClassObject getClassObject() {
+ public JetObjectDeclaration getClassObject() {
return null;
}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetScriptInfo.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetScriptInfo.kt
index 96fa7ecad50df..9e023b4165590 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetScriptInfo.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/data/JetScriptInfo.kt
@@ -28,7 +28,7 @@ public class JetScriptInfo(
override fun getContainingPackageFqName() = fqName.parent()
override fun getModifierList() = null
override fun getClassObject() = null
- override fun getClassObjects() = listOf<JetClassObject>()
+ override fun getClassObjects() = listOf<JetObjectDeclaration>()
override fun getScopeAnchor() = script
override fun getCorrespondingClassOrObject() = null
override fun getTypeParameterList() = null
@@ -44,5 +44,5 @@ public fun shouldBeScriptClassMember(declaration: JetDeclaration): Boolean {
// we only add those vals, vars and funs that have explicitly specified return types
// (or implicit Unit for function with block body)
return declaration is JetCallableDeclaration && declaration.getTypeReference() != null
- || declaration is JetNamedFunction && declaration.hasBlockBody()
+ || declaration is JetNamedFunction && declaration.hasBlockBody()
}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/declarations/PsiBasedClassMemberDeclarationProvider.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/declarations/PsiBasedClassMemberDeclarationProvider.kt
index e8caeb8367240..ad22909fc45f7 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/declarations/PsiBasedClassMemberDeclarationProvider.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/declarations/PsiBasedClassMemberDeclarationProvider.kt
@@ -29,12 +29,7 @@ public class PsiBasedClassMemberDeclarationProvider(
override fun doCreateIndex(index: AbstractPsiBasedDeclarationProvider.Index) {
for (declaration in classInfo.getDeclarations()) {
- if (declaration !is JetClassObject) {
- index.putToIndex(declaration)
- }
- else {
- index.putToIndex(declaration.getObjectDeclaration())
- }
+ index.putToIndex(declaration)
}
for (parameter in classInfo.getPrimaryConstructorParameters()) {
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/descriptors/LazyClassDescriptor.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/descriptors/LazyClassDescriptor.java
index 6bb0ad2355c88..81d02caf9367e 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/descriptors/LazyClassDescriptor.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/descriptors/LazyClassDescriptor.java
@@ -83,7 +83,7 @@ public boolean apply(JetType type) {
private final Annotations annotations;
private final Annotations danglingAnnotations;
private final NullableLazyValue<LazyClassDescriptor> classObjectDescriptor;
- private final MemoizedFunctionToNotNull<JetClassObject, ClassDescriptor> extraClassObjectDescriptors;
+ private final MemoizedFunctionToNotNull<JetObjectDeclaration, ClassDescriptor> extraClassObjectDescriptors;
private final LazyClassMemberScope unsubstitutedMemberScope;
private final JetScope staticScope = new StaticScopeForKotlinClass(this);
@@ -186,9 +186,9 @@ public LazyClassDescriptor invoke() {
return computeClassObjectDescriptor(getClassObjectIfAllowed());
}
});
- this.extraClassObjectDescriptors = storageManager.createMemoizedFunction(new Function1<JetClassObject, ClassDescriptor>() {
+ this.extraClassObjectDescriptors = storageManager.createMemoizedFunction(new Function1<JetObjectDeclaration, ClassDescriptor>() {
@Override
- public ClassDescriptor invoke(JetClassObject classObject) {
+ public ClassDescriptor invoke(JetObjectDeclaration classObject) {
return computeClassObjectDescriptor(classObject);
}
});
@@ -357,21 +357,21 @@ public LazyClassDescriptor getClassObjectDescriptor() {
@NotNull
@ReadOnly
public List<ClassDescriptor> getDescriptorsForExtraClassObjects() {
- final JetClassObject allowedClassObject = getClassObjectIfAllowed();
+ final JetObjectDeclaration allowedClassObject = getClassObjectIfAllowed();
return KotlinPackage.map(
KotlinPackage.filter(
declarationProvider.getOwnerInfo().getClassObjects(),
- new Function1<JetClassObject, Boolean>() {
+ new Function1<JetObjectDeclaration, Boolean>() {
@Override
- public Boolean invoke(JetClassObject classObject) {
+ public Boolean invoke(JetObjectDeclaration classObject) {
return classObject != allowedClassObject;
}
}
),
- new Function1<JetClassObject, ClassDescriptor>() {
+ new Function1<JetObjectDeclaration, ClassDescriptor>() {
@Override
- public ClassDescriptor invoke(JetClassObject classObject) {
+ public ClassDescriptor invoke(JetObjectDeclaration classObject) {
return extraClassObjectDescriptors.invoke(classObject);
}
}
@@ -379,7 +379,7 @@ public ClassDescriptor invoke(JetClassObject classObject) {
}
@Nullable
- private LazyClassDescriptor computeClassObjectDescriptor(@Nullable JetClassObject classObject) {
+ private LazyClassDescriptor computeClassObjectDescriptor(@Nullable JetObjectDeclaration classObject) {
JetClassLikeInfo classObjectInfo = getClassObjectInfo(classObject);
if (classObjectInfo instanceof JetClassOrObjectInfo) {
Name name = ((JetClassOrObjectInfo) classObjectInfo).getName();
@@ -404,21 +404,21 @@ private LazyClassDescriptor computeClassObjectDescriptor(@Nullable JetClassObjec
}
@Nullable
- private JetClassLikeInfo getClassObjectInfo(@Nullable JetClassObject classObject) {
+ private JetClassLikeInfo getClassObjectInfo(@Nullable JetObjectDeclaration classObject) {
if (classObject != null) {
if (!isClassObjectAllowed()) {
c.getTrace().report(CLASS_OBJECT_NOT_ALLOWED.on(classObject));
}
- return JetClassInfoUtil.createClassLikeInfo(classObject.getObjectDeclaration());
+ return JetClassInfoUtil.createClassLikeInfo(classObject);
}
return null;
}
@Nullable
- private JetClassObject getClassObjectIfAllowed() {
- JetClassObject classObject = declarationProvider.getOwnerInfo().getClassObject();
+ private JetObjectDeclaration getClassObjectIfAllowed() {
+ JetObjectDeclaration classObject = declarationProvider.getOwnerInfo().getClassObject();
return (classObject != null && isClassObjectAllowed()) ? classObject : null;
}
diff --git a/compiler/tests/org/jetbrains/kotlin/renderer/AbstractDescriptorRendererTest.kt b/compiler/tests/org/jetbrains/kotlin/renderer/AbstractDescriptorRendererTest.kt
index 1d7ec94365c3c..c72a8efff2aac 100644
--- a/compiler/tests/org/jetbrains/kotlin/renderer/AbstractDescriptorRendererTest.kt
+++ b/compiler/tests/org/jetbrains/kotlin/renderer/AbstractDescriptorRendererTest.kt
@@ -71,10 +71,6 @@ public abstract class AbstractDescriptorRendererTest : KotlinTestWithEnvironment
file.acceptChildren(this)
}
- override fun visitClassObject(classObject: JetClassObject) {
- classObject.acceptChildren(this)
- }
-
override fun visitParameter(parameter: JetParameter) {
val declaringElement = parameter.getParent().getParent()
when (declaringElement) {
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/JetIconProvider.java b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/JetIconProvider.java
index 34449055df625..fbc6287820714 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/JetIconProvider.java
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/JetIconProvider.java
@@ -155,7 +155,7 @@ public static Icon getBaseIcon(PsiElement psiElement) {
}
return icon;
}
- if (psiElement instanceof JetObjectDeclaration || psiElement instanceof JetClassObject) {
+ if (psiElement instanceof JetObjectDeclaration) {
return JetIcons.OBJECT;
}
if (psiElement instanceof JetParameter) {
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/navigation/JetSourceNavigationHelper.java b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/navigation/JetSourceNavigationHelper.java
index 954583cd8f142..9d085903b89ec 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/navigation/JetSourceNavigationHelper.java
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/navigation/JetSourceNavigationHelper.java
@@ -29,7 +29,6 @@
import com.intellij.psi.PsiFile;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.stubs.StringStubIndexExtension;
-import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import kotlin.KotlinPackage;
@@ -80,30 +79,9 @@ private JetSourceNavigationHelper() {
@Nullable
public static JetClassOrObject getSourceClassOrObject(@NotNull JetClassOrObject decompiledClassOrObject) {
- if (decompiledClassOrObject instanceof JetObjectDeclaration && decompiledClassOrObject.getParent() instanceof JetClassObject) {
- return getSourceClassObject((JetClassObject) decompiledClassOrObject.getParent());
- }
return getSourceForNamedClassOrObject(decompiledClassOrObject);
}
- private static JetClassOrObject getSourceClassObject(JetClassObject decompiledClassObject) {
- JetClass decompiledClass = PsiTreeUtil.getParentOfType(decompiledClassObject, JetClass.class);
- assert decompiledClass != null;
-
- JetClass sourceClass = (JetClass) getSourceForNamedClassOrObject(decompiledClass);
- if (sourceClass == null) {
- return null;
- }
-
- if (sourceClass.hasModifier(JetTokens.ENUM_KEYWORD)) {
- return sourceClass;
- }
-
- JetClassObject classObject = sourceClass.getClassObject();
- assert classObject != null;
- return classObject.getObjectDeclaration();
- }
-
@NotNull
private static GlobalSearchScope createLibrarySourcesScope(@NotNull JetNamedDeclaration decompiledDeclaration) {
JetFile containingFile = decompiledDeclaration.getContainingJetFile();
@@ -458,11 +436,6 @@ public JetDeclaration visitObjectDeclaration(@NotNull JetObjectDeclaration decla
return getSourceClassOrObject(declaration);
}
- @Override
- public JetDeclaration visitClassObject(@NotNull JetClassObject classObject, Void data) {
- return getSourceClassObject(classObject);
- }
-
@Override
public JetDeclaration visitClass(@NotNull JetClass klass, Void data) {
return getSourceClassOrObject(klass);
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/stubBuilder/ClassClsStubBuilder.kt b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/stubBuilder/ClassClsStubBuilder.kt
index 06cd4df8e9ace..b60db677e847c 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/stubBuilder/ClassClsStubBuilder.kt
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/stubBuilder/ClassClsStubBuilder.kt
@@ -31,7 +31,6 @@ import org.jetbrains.kotlin.name.ClassId
import org.jetbrains.kotlin.psi.JetDelegationSpecifierList
import org.jetbrains.kotlin.psi.JetDelegatorToSuperClass
import org.jetbrains.kotlin.lexer.JetTokens
-import org.jetbrains.kotlin.psi.JetClassObject
import org.jetbrains.kotlin.serialization.deserialization.ProtoContainer
import org.jetbrains.kotlin.psi.stubs.impl.KotlinModifierListStubImpl
import org.jetbrains.kotlin.lexer.JetModifierKeywordToken
@@ -80,20 +79,10 @@ private class ClassClsStubBuilder(
}
private fun createClassOrObjectStubAndModifierListStub(): StubElement<out PsiElement> {
- val isClassObject = classKind == ProtoBuf.Class.Kind.CLASS_OBJECT
- if (isClassObject) {
- val classObjectStub = KotlinPlaceHolderStubImpl<JetClassObject>(parentStub, JetStubElementTypes.CLASS_OBJECT)
- val modifierList = createModifierListForClass(classObjectStub)
- val objectDeclarationStub = doCreateClassOrObjectStub(classObjectStub)
- createAnnotationStubs(c.components.annotationLoader.loadClassAnnotations(classProto, c.nameResolver), modifierList)
- return objectDeclarationStub
- }
- else {
- val classOrObjectStub = doCreateClassOrObjectStub(parentStub)
- val modifierList = createModifierListForClass(classOrObjectStub)
- createAnnotationStubs(c.components.annotationLoader.loadClassAnnotations(classProto, c.nameResolver), modifierList)
- return classOrObjectStub
- }
+ val classOrObjectStub = doCreateClassOrObjectStub()
+ val modifierList = createModifierListForClass(classOrObjectStub)
+ createAnnotationStubs(c.components.annotationLoader.loadClassAnnotations(classProto, c.nameResolver), modifierList)
+ return classOrObjectStub
}
private fun createModifierListForClass(parent: StubElement<out PsiElement>): KotlinModifierListStubImpl {
@@ -110,7 +99,7 @@ private class ClassClsStubBuilder(
return createModifierListStubForDeclaration(parent, classProto.getFlags(), relevantFlags, additionalModifiers)
}
- private fun doCreateClassOrObjectStub(parent: StubElement<out PsiElement>): StubElement<out PsiElement> {
+ private fun doCreateClassOrObjectStub(): StubElement<out PsiElement> {
val isClassObject = classKind == ProtoBuf.Class.Kind.CLASS_OBJECT
val fqName = outerContext.memberFqNameProvider.getMemberFqName(classId.getRelativeClassName().shortName())
val shortName = fqName.shortName()?.ref()
@@ -121,7 +110,7 @@ private class ClassClsStubBuilder(
return when (classKind) {
ProtoBuf.Class.Kind.OBJECT, ProtoBuf.Class.Kind.CLASS_OBJECT -> {
KotlinObjectStubImpl(
- parent, shortName, fqName, superTypeRefs,
+ parentStub, shortName, fqName, superTypeRefs,
isTopLevel = classId.isTopLevelClass(),
isClassObject = isClassObject,
isLocal = false,
@@ -131,7 +120,7 @@ private class ClassClsStubBuilder(
else -> {
KotlinClassStubImpl(
JetClassElementType.getStubType(classKind == ProtoBuf.Class.Kind.ENUM_ENTRY),
- parent,
+ parentStub,
fqName.ref(),
shortName,
superTypeRefs,
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/KotlinSuppressableWarningProblemGroup.kt b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/KotlinSuppressableWarningProblemGroup.kt
index 80c79f25a62fb..e0e6ea1cb9a38 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/KotlinSuppressableWarningProblemGroup.kt
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/KotlinSuppressableWarningProblemGroup.kt
@@ -85,9 +85,6 @@ private object DeclarationKindDetector : JetVisitor<AnnotationHostKind?, Unit?>(
override fun visitClass(d: JetClass, _: Unit?) = detect(d, if (d.isTrait()) "trait" else "class")
- override fun visitClassObject(d: JetClassObject, _: Unit?) = detect(d, "class object",
- name = "of " + d.getStrictParentOfType<JetClass>()?.getName())
-
override fun visitNamedFunction(d: JetNamedFunction, _: Unit?) = detect(d, "fun")
override fun visitProperty(d: JetProperty, _: Unit?) = detect(d, d.getValOrVarNode().getText()!!)
@@ -102,7 +99,7 @@ private object DeclarationKindDetector : JetVisitor<AnnotationHostKind?, Unit?>(
override fun visitParameter(d: JetParameter, _: Unit?) = detect(d, "parameter", newLineNeeded = false)
override fun visitObjectDeclaration(d: JetObjectDeclaration, _: Unit?): AnnotationHostKind? {
- if (d.getParent() is JetClassObject) return null
+ if (d.isClassObject()) return detect(d, "class object", name = "${d.getName()} of ${d.getStrictParentOfType<JetClass>()?.getName()}")
if (d.getParent() is JetObjectLiteralExpression) return null
return detect(d, "object")
}
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/structureView/JetStructureViewElement.java b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/structureView/JetStructureViewElement.java
index 414faec45f59c..7e9b74b066e6f 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/structureView/JetStructureViewElement.java
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/structureView/JetStructureViewElement.java
@@ -166,10 +166,6 @@ else if (element instanceof JetClass) {
else if (element instanceof JetClassOrObject) {
return ((JetClassOrObject) element).getDeclarations();
}
- else if (element instanceof JetClassObject) {
- JetObjectDeclaration objectDeclaration = ((JetClassObject) element).getObjectDeclaration();
- return objectDeclaration.getDeclarations();
- }
return Collections.emptyList();
}
diff --git a/idea/src/org/jetbrains/kotlin/idea/codeInsight/upDownMover/JetDeclarationMover.java b/idea/src/org/jetbrains/kotlin/idea/codeInsight/upDownMover/JetDeclarationMover.java
index 67eaaba358378..085902e24fcad 100644
--- a/idea/src/org/jetbrains/kotlin/idea/codeInsight/upDownMover/JetDeclarationMover.java
+++ b/idea/src/org/jetbrains/kotlin/idea/codeInsight/upDownMover/JetDeclarationMover.java
@@ -60,8 +60,10 @@ public void visitAnonymousInitializer(@NotNull JetClassInitializer initializer)
}
@Override
- public void visitClassObject(@NotNull JetClassObject classObject) {
- memberSuspects.add(classObject.getClassKeyword());
+ public void visitObjectDeclaration(@NotNull JetObjectDeclaration declaration) {
+ if (declaration.isClassObject()) {
+ memberSuspects.add(declaration.getClassKeyword());
+ }
}
@Override
diff --git a/idea/src/org/jetbrains/kotlin/idea/debugger/evaluate/extractFunctionForDebuggerUtil.kt b/idea/src/org/jetbrains/kotlin/idea/debugger/evaluate/extractFunctionForDebuggerUtil.kt
index 8cfa6e61d15a7..be9de56bb587a 100644
--- a/idea/src/org/jetbrains/kotlin/idea/debugger/evaluate/extractFunctionForDebuggerUtil.kt
+++ b/idea/src/org/jetbrains/kotlin/idea/debugger/evaluate/extractFunctionForDebuggerUtil.kt
@@ -193,9 +193,6 @@ private fun addDebugExpressionBeforeContextElement(codeFragment: JetCodeFragment
contextElement is JetClassOrObject -> {
insertNewInitializer(contextElement.getBody())
}
- contextElement is JetClassObject -> {
- insertNewInitializer(contextElement.getObjectDeclaration().getBody())
- }
contextElement is JetFunctionLiteral -> {
val block = contextElement.getBodyExpression()!!
block.getStatements().firstOrNull() ?: block.getLastChild()
diff --git a/idea/src/org/jetbrains/kotlin/idea/projectView/JetClassObjectTreeNode.java b/idea/src/org/jetbrains/kotlin/idea/projectView/JetClassObjectTreeNode.java
deleted file mode 100644
index 3ac3c7db97dcd..0000000000000
--- a/idea/src/org/jetbrains/kotlin/idea/projectView/JetClassObjectTreeNode.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2010-2015 JetBrains s.r.o.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.jetbrains.kotlin.idea.projectView;
-
-import com.intellij.ide.projectView.PresentationData;
-import com.intellij.ide.projectView.ViewSettings;
-import com.intellij.ide.projectView.impl.nodes.AbstractPsiBasedNode;
-import com.intellij.ide.util.treeView.AbstractTreeNode;
-import com.intellij.openapi.project.Project;
-import com.intellij.psi.PsiElement;
-import org.jetbrains.kotlin.psi.JetClassObject;
-import org.jetbrains.kotlin.psi.JetPsiUtil;
-
-import java.util.Collection;
-
-import static org.jetbrains.kotlin.idea.projectView.JetProjectViewUtil.canRepresentPsiElement;
-import static org.jetbrains.kotlin.idea.projectView.JetProjectViewUtil.getClassOrObjectChildren;
-
-public class JetClassObjectTreeNode extends AbstractPsiBasedNode<JetClassObject> {
- protected JetClassObjectTreeNode(Project project, JetClassObject classObject, ViewSettings viewSettings) {
- super(project, classObject, viewSettings);
- }
-
- @Override
- protected PsiElement extractPsiFromValue() {
- return getValue();
- }
-
- @Override
- protected Collection<AbstractTreeNode> getChildrenImpl() {
- return getClassOrObjectChildren(getValue().getObjectDeclaration(), getProject(), getSettings());
- }
-
- @Override
- protected void updateImpl(PresentationData data) {
- data.setPresentableText("<class object>");
- }
-
- @Override
- public boolean canRepresent(Object element) {
- if (!isValid()) {
- return false;
- }
-
- return super.canRepresent(element) || canRepresentPsiElement(getValue(), element, getSettings());
- }
-
- @Override
- protected boolean isDeprecated() {
- return JetPsiUtil.isDeprecated(getValue());
- }
-}
diff --git a/idea/src/org/jetbrains/kotlin/idea/projectView/JetProjectViewUtil.java b/idea/src/org/jetbrains/kotlin/idea/projectView/JetProjectViewUtil.java
index 38a44fd4ea42d..e833fb608649a 100644
--- a/idea/src/org/jetbrains/kotlin/idea/projectView/JetProjectViewUtil.java
+++ b/idea/src/org/jetbrains/kotlin/idea/projectView/JetProjectViewUtil.java
@@ -21,7 +21,6 @@
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
-import org.jetbrains.kotlin.psi.JetClassObject;
import org.jetbrains.kotlin.psi.JetClassOrObject;
import org.jetbrains.kotlin.psi.JetDeclaration;
@@ -44,9 +43,6 @@ public static Collection<AbstractTreeNode> getClassOrObjectChildren(JetClassOrOb
if (declaration instanceof JetClassOrObject) {
result.add(new JetClassOrObjectTreeNode(project, (JetClassOrObject) declaration, settings));
}
- else if (declaration instanceof JetClassObject) {
- result.add(new JetClassObjectTreeNode(project, (JetClassObject) declaration, settings));
- }
else {
result.add(new JetDeclarationTreeNode(project, declaration, settings));
}
diff --git a/idea/src/org/jetbrains/kotlin/idea/run/JetRunConfigurationProducer.java b/idea/src/org/jetbrains/kotlin/idea/run/JetRunConfigurationProducer.java
index 42a86aac9a32d..ab151d894ca85 100644
--- a/idea/src/org/jetbrains/kotlin/idea/run/JetRunConfigurationProducer.java
+++ b/idea/src/org/jetbrains/kotlin/idea/run/JetRunConfigurationProducer.java
@@ -117,8 +117,7 @@ public FunctionDescriptor fun(JetNamedFunction function) {
currentElement = PsiTreeUtil.getParentOfType((PsiElement) currentElement, JetClassOrObject.class, JetFile.class)) {
JetDeclarationContainer entryPointContainer = currentElement;
if (entryPointContainer instanceof JetClass) {
- JetClassObject classObject = ((JetClass) currentElement).getClassObject();
- entryPointContainer = classObject != null ? classObject.getObjectDeclaration() : null;
+ entryPointContainer = ((JetClass) currentElement).getClassObject();
}
if (entryPointContainer != null && mainFunctionDetector.hasMain(entryPointContainer.getDeclarations())) return entryPointContainer;
}
diff --git a/idea/src/org/jetbrains/kotlin/idea/util/psi/patternMatching/JetPsiUnifier.kt b/idea/src/org/jetbrains/kotlin/idea/util/psi/patternMatching/JetPsiUnifier.kt
index 887662fa90819..84e982162d3f3 100644
--- a/idea/src/org/jetbrains/kotlin/idea/util/psi/patternMatching/JetPsiUnifier.kt
+++ b/idea/src/org/jetbrains/kotlin/idea/util/psi/patternMatching/JetPsiUnifier.kt
@@ -72,7 +72,6 @@ import org.jetbrains.kotlin.psi.JetParameter
import org.jetbrains.kotlin.descriptors.ClassDescriptor
import org.jetbrains.kotlin.psi.JetClassOrObject
import org.jetbrains.kotlin.psi.JetCallableDeclaration
-import org.jetbrains.kotlin.psi.JetClassObject
import org.jetbrains.kotlin.psi.JetTypeParameter
import org.jetbrains.kotlin.descriptors.TypeParameterDescriptor
import org.jetbrains.kotlin.renderer.DescriptorRenderer
@@ -695,9 +694,6 @@ public class JetPsiUnifier(
e1 is JetMultiDeclaration && e2 is JetMultiDeclaration ->
if (matchMultiDeclarations(e1, e2)) null else UNMATCHED
- e1 is JetClassObject && e2 is JetClassObject ->
- e1.getObjectDeclaration().matchDeclarations(e2.getObjectDeclaration())
-
e1 is JetClassInitializer && e2 is JetClassInitializer ->
null
diff --git a/idea/testData/quickfix/suppress/declarationKinds/afterClassObject.kt b/idea/testData/quickfix/suppress/declarationKinds/afterClassObject.kt
index d6f897ae27ff9..593b6872e267c 100644
--- a/idea/testData/quickfix/suppress/declarationKinds/afterClassObject.kt
+++ b/idea/testData/quickfix/suppress/declarationKinds/afterClassObject.kt
@@ -1,4 +1,4 @@
-// "Suppress 'REDUNDANT_NULLABLE' for class object of C" "true"
+// "Suppress 'REDUNDANT_NULLABLE' for class object Default of C" "true"
class C {
[suppress("REDUNDANT_NULLABLE")]
diff --git a/idea/testData/quickfix/suppress/declarationKinds/beforeClassObject.kt b/idea/testData/quickfix/suppress/declarationKinds/beforeClassObject.kt
index 92865aee6a81f..c6b058626b934 100644
--- a/idea/testData/quickfix/suppress/declarationKinds/beforeClassObject.kt
+++ b/idea/testData/quickfix/suppress/declarationKinds/beforeClassObject.kt
@@ -1,4 +1,4 @@
-// "Suppress 'REDUNDANT_NULLABLE' for class object of C" "true"
+// "Suppress 'REDUNDANT_NULLABLE' for class object Default of C" "true"
class C {
class object {
diff --git a/idea/tests/org/jetbrains/kotlin/idea/stubs/DebugTextByStubTest.kt b/idea/tests/org/jetbrains/kotlin/idea/stubs/DebugTextByStubTest.kt
index 7fa69e4117f86..7afc5a3bf98ca 100644
--- a/idea/tests/org/jetbrains/kotlin/idea/stubs/DebugTextByStubTest.kt
+++ b/idea/tests/org/jetbrains/kotlin/idea/stubs/DebugTextByStubTest.kt
@@ -37,7 +37,6 @@ import org.jetbrains.kotlin.psi.stubs.KotlinPropertyStub
import kotlin.test.assertEquals
import org.jetbrains.kotlin.psi.JetClassBody
import org.jetbrains.kotlin.psi.JetClassInitializer
-import org.jetbrains.kotlin.psi.JetClassObject
import org.jetbrains.kotlin.psi.debugText.getDebugText
public class DebugTextByStubTest : LightCodeInsightFixtureTestCase() {
@@ -201,10 +200,10 @@ public class DebugTextByStubTest : LightCodeInsightFixtureTestCase() {
}
fun testClassObject() {
- val tree = createStubTree("class A { class object {} }")
+ val tree = createStubTree("class A { class object Def {} }")
val classObject = tree.findChildStubByType(JetStubElementTypes.CLASS)!!.findChildStubByType(JetStubElementTypes.CLASS_BODY)!!
- .findChildStubByType(JetStubElementTypes.CLASS_OBJECT)
- assertEquals("class object in STUB: class A", JetClassObject(classObject as KotlinPlaceHolderStub).getDebugText())
+ .findChildStubByType(JetStubElementTypes.OBJECT_DECLARATION)
+ assertEquals("STUB: class object Def", JetObjectDeclaration(classObject as KotlinObjectStub).getDebugText())
}
fun testPropertyAccessors() {
diff --git a/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/DeclarationBodyVisitor.java b/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/DeclarationBodyVisitor.java
index f4dfd6a04e6ac..0ebad4b0d21a7 100644
--- a/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/DeclarationBodyVisitor.java
+++ b/js/js.translator/src/org/jetbrains/kotlin/js/translate/declaration/DeclarationBodyVisitor.java
@@ -84,8 +84,11 @@ public Void visitEnumEntry(@NotNull JetEnumEntry enumEntry, TranslationContext d
}
@Override
- public Void visitClassObject(@NotNull JetClassObject classObject, TranslationContext context) {
- JetObjectDeclaration declaration = classObject.getObjectDeclaration();
+ public Void visitObjectDeclaration(@NotNull JetObjectDeclaration declaration, TranslationContext context) {
+ if (!declaration.isClassObject()) {
+ // parsed it in initializer visitor => no additional actions are needed
+ return null;
+ }
JsExpression value = ClassTranslator.generateClassCreation(declaration, context);
ClassDescriptor descriptor = getClassDescriptor(context.bindingContext(), declaration);
@@ -94,12 +97,6 @@ public Void visitClassObject(@NotNull JetClassObject classObject, TranslationCon
return null;
}
- @Override
- public Void visitObjectDeclaration(@NotNull JetObjectDeclaration declaration, TranslationContext context) {
- // parsed it in initializer visitor => no additional actions are needed
- return null;
- }
-
@Override
public Void visitNamedFunction(@NotNull JetNamedFunction expression, TranslationContext context) {
FunctionDescriptor descriptor = getFunctionDescriptor(context.bindingContext(), expression);
diff --git a/js/js.translator/src/org/jetbrains/kotlin/js/translate/initializer/InitializerVisitor.java b/js/js.translator/src/org/jetbrains/kotlin/js/translate/initializer/InitializerVisitor.java
index 7ac2439807bcb..292ac2c0e995d 100644
--- a/js/js.translator/src/org/jetbrains/kotlin/js/translate/initializer/InitializerVisitor.java
+++ b/js/js.translator/src/org/jetbrains/kotlin/js/translate/initializer/InitializerVisitor.java
@@ -65,7 +65,9 @@ public Void visitDeclaration(@NotNull JetDeclaration expression, @NotNull Transl
@Override
public Void visitObjectDeclaration(@NotNull JetObjectDeclaration declaration, @NotNull TranslationContext context) {
- InitializerUtils.generateObjectInitializer(declaration, result, context);
+ if (!declaration.isClassObject()) {
+ InitializerUtils.generateObjectInitializer(declaration, result, context);
+ }
return null;
}
}
|
018e2cf16558108622c98fd82bcbde3ce14270db
|
camel
|
polished--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1209457 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/test/java/org/apache/camel/processor/ChoiceWhenBeanExpressionWithExceptionTest.java b/camel-core/src/test/java/org/apache/camel/processor/ChoiceWhenBeanExpressionWithExceptionTest.java
index 6d78e9948e485..203573a0fdfc5 100644
--- a/camel-core/src/test/java/org/apache/camel/processor/ChoiceWhenBeanExpressionWithExceptionTest.java
+++ b/camel-core/src/test/java/org/apache/camel/processor/ChoiceWhenBeanExpressionWithExceptionTest.java
@@ -36,7 +36,7 @@ protected void verifyGradeA(String endpointUri) throws Exception {
template.sendBody(endpointUri, new Student(95));
fail();
} catch (CamelExecutionException e) {
- e.printStackTrace();
+ // expected
}
assertMockEndpointsSatisfied();
}
|
46349a89d9d332e1f7c9f976f65efc79ea46cd29
|
hbase
|
HBASE-6667 TestCatalogJanitor occasionally fails;- PATCH THAT ADDS DEBUG AROUND FAILING TEST--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1379682 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
index 9b076f315528..8c0f05084e62 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
@@ -245,6 +245,7 @@ boolean cleanParent(final HRegionInfo parent, Result rowContent)
this.services.getAssignmentManager().regionOffline(parent);
}
FileSystem fs = this.services.getMasterFileSystem().getFileSystem();
+ LOG.debug("Archiving parent region:" + parent);
HFileArchiver.archiveRegion(fs, parent);
MetaEditor.deleteRegion(this.server.getCatalogTracker(), parent);
result = true;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
index 39ba8c75318c..377ef7363434 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
@@ -32,6 +32,8 @@
import java.util.SortedMap;
import java.util.TreeMap;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -76,6 +78,8 @@
@Category(SmallTests.class)
public class TestCatalogJanitor {
+ private static final Log LOG = LogFactory.getLog(TestCatalogJanitor.class);
+
/**
* Pseudo server for below tests.
* Be sure to call stop on the way out else could leave some mess around.
@@ -529,6 +533,10 @@ public void testScanDoesNotCleanRegionsWithExistingParents() throws Exception {
janitor.join();
}
+ /**
+ * Test that we correctly archive all the storefiles when a region is deleted
+ * @throws Exception
+ */
@Test
public void testArchiveOldRegion() throws Exception {
String table = "table";
@@ -546,10 +554,10 @@ public void testArchiveOldRegion() throws Exception {
HRegionInfo parent = new HRegionInfo(htd.getName(), Bytes.toBytes("aaa"), Bytes.toBytes("eee"));
HRegionInfo splita = new HRegionInfo(htd.getName(), Bytes.toBytes("aaa"), Bytes.toBytes("ccc"));
HRegionInfo splitb = new HRegionInfo(htd.getName(), Bytes.toBytes("ccc"), Bytes.toBytes("eee"));
+
// Test that when both daughter regions are in place, that we do not
// remove the parent.
- Result r = createResult(parent, splita, splitb);
-
+ Result parentMetaRow = createResult(parent, splita, splitb);
FileSystem fs = FileSystem.get(htu.getConfiguration());
Path rootdir = services.getMasterFileSystem().getRootDir();
// have to set the root directory since we use it in HFileDisposer to figure out to get to the
@@ -559,32 +567,53 @@ public void testArchiveOldRegion() throws Exception {
Path tabledir = HTableDescriptor.getTableDir(rootdir, htd.getName());
Path storedir = HStore.getStoreHomedir(tabledir, parent.getEncodedName(),
htd.getColumnFamilies()[0].getName());
-
- // delete the file and ensure that the files have been archived
Path storeArchive = HFileArchiveUtil.getStoreArchivePath(services.getConfiguration(), parent,
tabledir, htd.getColumnFamilies()[0].getName());
+ LOG.debug("Table dir:" + tabledir);
+ LOG.debug("Store dir:" + storedir);
+ LOG.debug("Store archive dir:" + storeArchive);
- // enable archiving, make sure that files get archived
- addMockStoreFiles(2, services, storedir);
+ // add a couple of store files that we can check for
+ FileStatus[] mockFiles = addMockStoreFiles(2, services, storedir);
// get the current store files for comparison
FileStatus[] storeFiles = fs.listStatus(storedir);
+ int index = 0;
for (FileStatus file : storeFiles) {
- System.out.println("Have store file:" + file.getPath());
+ LOG.debug("Have store file:" + file.getPath());
+ assertEquals("Got unexpected store file", mockFiles[index].getPath(),
+ storeFiles[index].getPath());
+ index++;
}
// do the cleaning of the parent
- assertTrue(janitor.cleanParent(parent, r));
+ assertTrue(janitor.cleanParent(parent, parentMetaRow));
+ LOG.debug("Finished cleanup of parent region");
// and now check to make sure that the files have actually been archived
FileStatus[] archivedStoreFiles = fs.listStatus(storeArchive);
+ logFiles("archived files", storeFiles);
+ logFiles("archived files", archivedStoreFiles);
+
assertArchiveEqualToOriginal(storeFiles, archivedStoreFiles, fs);
// cleanup
+ FSUtils.delete(fs, rootdir, true);
services.stop("Test finished");
- server.stop("shutdown");
+ server.stop("Test finished");
janitor.join();
}
+ /**
+ * @param description description of the files for logging
+ * @param storeFiles the status of the files to log
+ */
+ private void logFiles(String description, FileStatus[] storeFiles) {
+ LOG.debug("Current " + description + ": ");
+ for (FileStatus file : storeFiles) {
+ LOG.debug(file.getPath());
+ }
+ }
+
/**
* Test that if a store file with the same name is present as those already backed up cause the
* already archived files to be timestamped backup
@@ -657,7 +686,7 @@ public void testDuplicateHFileResolution() throws Exception {
janitor.join();
}
- private void addMockStoreFiles(int count, MasterServices services, Path storedir)
+ private FileStatus[] addMockStoreFiles(int count, MasterServices services, Path storedir)
throws IOException {
// get the existing store files
FileSystem fs = services.getMasterFileSystem().getFileSystem();
@@ -669,9 +698,11 @@ private void addMockStoreFiles(int count, MasterServices services, Path storedir
dos.writeBytes("Some data: " + i);
dos.close();
}
+ LOG.debug("Adding " + count + " store files to the storedir:" + storedir);
// make sure the mock store files are there
FileStatus[] storeFiles = fs.listStatus(storedir);
- assertEquals(count, storeFiles.length);
+ assertEquals("Didn't have expected store files", count, storeFiles.length);
+ return storeFiles;
}
private String setRootDirAndCleanIt(final HBaseTestingUtility htu,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
index 1b48cb7db92c..fa6c44b84f2d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
@@ -85,29 +85,29 @@ public static boolean compareArchiveToOriginal(FileStatus[] previous, FileStatus
/**
* Compare the archived files to the files in the original directory
- * @param previous original files that should have been archived
- * @param archived files that were archived
+ * @param expected original files that should have been archived
+ * @param actual files that were archived
* @param fs filessystem on which the archiving took place
* @throws IOException
*/
- public static void assertArchiveEqualToOriginal(FileStatus[] previous, FileStatus[] archived,
+ public static void assertArchiveEqualToOriginal(FileStatus[] expected, FileStatus[] actual,
FileSystem fs) throws IOException {
- assertArchiveEqualToOriginal(previous, archived, fs, false);
+ assertArchiveEqualToOriginal(expected, actual, fs, false);
}
/**
* Compare the archived files to the files in the original directory
- * @param previous original files that should have been archived
- * @param archived files that were archived
+ * @param expected original files that should have been archived
+ * @param actual files that were archived
* @param fs {@link FileSystem} on which the archiving took place
* @param hasTimedBackup <tt>true</tt> if we expect to find an archive backup directory with a
* copy of the files in the archive directory (and the original files).
* @throws IOException
*/
- public static void assertArchiveEqualToOriginal(FileStatus[] previous, FileStatus[] archived,
+ public static void assertArchiveEqualToOriginal(FileStatus[] expected, FileStatus[] actual,
FileSystem fs, boolean hasTimedBackup) throws IOException {
- List<List<String>> lists = getFileLists(previous, archived);
+ List<List<String>> lists = getFileLists(expected, actual);
List<String> original = lists.get(0);
Collections.sort(original);
|
271eda44a52f5aa8dc1c9dac3679ec3fd0596eab
|
ReactiveX-RxJava
|
Fixed byLine test to use line.separator system- property instead of \n.--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-contrib/rxjava-string/src/test/java/rx/observables/StringObservableTest.java b/rxjava-contrib/rxjava-string/src/test/java/rx/observables/StringObservableTest.java
index 01dcc8f435..566ee92666 100644
--- a/rxjava-contrib/rxjava-string/src/test/java/rx/observables/StringObservableTest.java
+++ b/rxjava-contrib/rxjava-string/src/test/java/rx/observables/StringObservableTest.java
@@ -247,7 +247,9 @@ public void testFromReader() {
@Test
public void testByLine() {
- List<Line> lines = StringObservable.byLine(Observable.from(Arrays.asList("qwer", "\nasdf\n", "zx", "cv"))).toList().toBlockingObservable().single();
+ String newLine = System.getProperty("line.separator");
+
+ List<Line> lines = StringObservable.byLine(Observable.from(Arrays.asList("qwer", newLine + "asdf" + newLine, "zx", "cv"))).toList().toBlockingObservable().single();
assertEquals(Arrays.asList(new Line(0, "qwer"), new Line(1, "asdf"), new Line(2, "zxcv")), lines);
}
|
ce8dcc887659ef6b378f2c0650352d67b61b2e7f
|
intellij-community
|
Replace instanceof template variable with- variable segment--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/src/org/jetbrains/postfixCompletion/templates/InstanceofExpressionPostfixTemplate.java b/src/org/jetbrains/postfixCompletion/templates/InstanceofExpressionPostfixTemplate.java
index c7552c799583c..ca40de9048332 100644
--- a/src/org/jetbrains/postfixCompletion/templates/InstanceofExpressionPostfixTemplate.java
+++ b/src/org/jetbrains/postfixCompletion/templates/InstanceofExpressionPostfixTemplate.java
@@ -93,7 +93,7 @@ public Result calculateQuickResult(ExpressionContext context) {
String type = "type";
template.addVariable(type, expr, expr, true);
template.addTextSegment(" ? ((");
- template.addVariable(type, expr, expr, true);
+ template.addVariableSegment(type);
template.addTextSegment(")" + exprText + ")");
template.addEndVariable();
template.addTextSegment(" : null;");
|
c7c8f2fe48d3a16ac70fb98f662d3d77292ba0cd
|
hadoop
|
MAPREDUCE-2603. Disable High-Ram emulation in- system tests. (Vinay Kumar Thota via amarrk)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1138301 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/mapreduce/CHANGES.txt b/mapreduce/CHANGES.txt
index 988c40eda0b45..75c454e91447b 100644
--- a/mapreduce/CHANGES.txt
+++ b/mapreduce/CHANGES.txt
@@ -186,6 +186,9 @@ Trunk (unreleased changes)
BUG FIXES
+ MAPREDUCE-2603. Disable High-Ram emulation in system tests.
+ (Vinay Kumar Thota via amarrk)
+
MAPREDUCE-2539. Fixed NPE in getMapTaskReports in JobClient. (Robert Evans via
acmurthy)
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationEnableForAllTypesOfJobs.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationEnableForAllTypesOfJobs.java
index 4144bae842e87..3ade9e34e687b 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationEnableForAllTypesOfJobs.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationEnableForAllTypesOfJobs.java
@@ -56,6 +56,7 @@ public void testInputCompressionEmualtionEnableForAllJobsWithDefaultRatios()
final String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_INPUT_DECOMPRESS_ENABLE + "=true",
"-D", GridMixConfig.GRIDMIX_INPUT_COMPRESS_RATIO + "=0.46",
"-D", GridMixConfig.GRIDMIX_INTERMEDIATE_COMPRESSION_RATIO + "=0.35",
@@ -84,6 +85,7 @@ public void testInputCompressionEmulationEnableForAllJobsWithCustomRatios()
final String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForCompressInAndUncompressOut.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForCompressInAndUncompressOut.java
index 6f0dcbff0f056..4b7fc3a15aada 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForCompressInAndUncompressOut.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForCompressInAndUncompressOut.java
@@ -56,6 +56,7 @@ public void testCompressionEmulationOfCompressedInputWithDefaultRatios()
final String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true"
};
@@ -85,6 +86,7 @@ public void testCompressionEmulationOfCompressedInputWithCustomRatios()
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true",
"-D", GridMixConfig.GRIDMIX_INPUT_DECOMPRESS_ENABLE + "=true",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_INPUT_COMPRESS_RATIO + "=0.58",
"-D", GridMixConfig.GRIDMIX_INTERMEDIATE_COMPRESSION_RATIO + "=0.42"
};
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForUncompressInAndCompressOut.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForUncompressInAndCompressOut.java
index 70dc0d1276451..383fc83de4b1d 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForUncompressInAndCompressOut.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForUncompressInAndCompressOut.java
@@ -54,6 +54,7 @@ public void testCompressionEmulationOfCompressedOuputWithDefaultRatios()
final String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true"
};
@@ -82,6 +83,7 @@ public void testCompressionEmulationOfCompressedOutputWithCustomRatios()
final String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_OUTPUT_COMPRESSION_RATIO + "=0.38"
};
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSAndLocalFSDCFiles.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSAndLocalFSDCFiles.java
index d98b259177ab4..a1ae1e9dfafe7 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSAndLocalFSDCFiles.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSAndLocalFSDCFiles.java
@@ -59,6 +59,7 @@ public void testGenerateDataEmulateHDFSAndLocalFSDCFiles()
final String [] otherArgs = {
"-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
@@ -85,6 +86,7 @@ public void testEmulationOfHDFSAndLocalFSDCFiles()
final String [] otherArgs = {
"-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFileUsesMultipleJobs.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFileUsesMultipleJobs.java
index 00d2e4825a2a2..7f8938f88a742 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFileUsesMultipleJobs.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFileUsesMultipleJobs.java
@@ -58,6 +58,7 @@ public void testGenerateAndEmulationOfHDFSDCFile()
final String [] otherArgs = {
"-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
@@ -81,6 +82,7 @@ public void testGridmixEmulationOfHDFSPublicDCFile()
tracePath};
final String [] otherArgs = {
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFilesWithDifferentVisibilities.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFilesWithDifferentVisibilities.java
index 3840f1bbeafa0..453e5b990815b 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFilesWithDifferentVisibilities.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFilesWithDifferentVisibilities.java
@@ -60,6 +60,7 @@ public void testGenerateAndEmulateOfHDFSDCFilesWithDiffVisibilities()
final String [] otherArgs = {
"-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
@@ -81,6 +82,7 @@ public void testHDFSDCFilesWithoutEnableDCEmulation()
"REPLAY",
tracePath};
final String [] otherArgs = {
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfLocalFSDCFiles.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfLocalFSDCFiles.java
index e50eb6e2e8138..eff47f2d64134 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfLocalFSDCFiles.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfLocalFSDCFiles.java
@@ -57,6 +57,7 @@ public void testGenerateInputAndEmulateLocalFSDCFile()
final String [] otherArgs = {
"-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
@@ -83,6 +84,7 @@ public void testEmulationOfLocalFSDCFile()
final String [] otherArgs = {
"-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixDataGeneration.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixDataGeneration.java
index f1501bf850b77..ef273b5fd2519 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixDataGeneration.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixDataGeneration.java
@@ -93,6 +93,7 @@ public void testGenerateDataWithSTRESSSubmission() throws Exception {
String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
int exitCode =
@@ -123,6 +124,7 @@ public void testGenerateDataWithREPLAYSubmission() throws Exception {
String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
@@ -154,6 +156,7 @@ public void testGenerateDataWithSERIALSubmission() throws Exception {
long bytesPerFile = 200 * 1024 * 1024; // 200 mb per file of data
String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_BYTES_PER_FILE + "=" + bytesPerFile,
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixFilePool.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixFilePool.java
index 1ad10d8af50fe..883feec88fcbe 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixFilePool.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixFilePool.java
@@ -80,6 +80,7 @@ public void testFilesCountAndSizesForSpecifiedFilePool() throws Exception {
String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixCompressionEmulationWithCompressInput.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixCompressionEmulationWithCompressInput.java
index adaa0d2363be8..3fdd16d7f6f90 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixCompressionEmulationWithCompressInput.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixCompressionEmulationWithCompressInput.java
@@ -59,6 +59,7 @@ public void testGridmixCompressionRatiosAgainstDefaultCompressionRatio()
final String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
@@ -89,6 +90,7 @@ public void testGridmixOuputCompressionRatiosAgainstCustomRatios()
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true",
"-D", GridMixConfig.GRIDMIX_INPUT_DECOMPRESS_ENABLE + "=true",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_INPUT_COMPRESS_RATIO + "=0.68",
"-D", GridMixConfig.GRIDMIX_INTERMEDIATE_COMPRESSION_RATIO + "=0.35",
"-D", GridMixConfig.GRIDMIX_OUTPUT_COMPRESSION_RATIO + "=0.40"
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPrivateDCFile.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPrivateDCFile.java
index 5289bf3c8a140..e6c7e6af46ba3 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPrivateDCFile.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPrivateDCFile.java
@@ -56,6 +56,7 @@ public void testGenerateAndEmulateOfHDFSPrivateDCFile()
final String [] otherArgs = {
"-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
@@ -78,6 +79,7 @@ public void testGridmixEmulationOfHDFSPrivateDCFile()
"REPLAY",
tracePath};
final String [] otherArgs = {
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPublicDCFile.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPublicDCFile.java
index e12180c72e428..0bf07fdf4d208 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPublicDCFile.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPublicDCFile.java
@@ -55,6 +55,7 @@ public void testGenerateAndEmulationOfSingleHDFSDCFile()
final String [] otherArgs = {
"-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
@@ -80,6 +81,7 @@ public void testGridmixEmulationOfSingleHDFSPublicDCFile()
tracePath};
final String [] otherArgs = {
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPrivateDCFiles.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPrivateDCFiles.java
index 4dca1a214ce82..5f464ce39be56 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPrivateDCFiles.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPrivateDCFiles.java
@@ -58,6 +58,7 @@ public void testGenerateAndEmulationOfMultipleHDFSPrivateDCFiles()
tracePath};
final String [] otherArgs = {
"-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
@@ -81,6 +82,7 @@ public void testGridmixEmulationOfMultipleHDFSPrivateDCFiles()
"STRESS",
tracePath};
final String [] otherArgs = {
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPublicDCFiles.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPublicDCFiles.java
index 09bbf181226c1..cca5da83ecb48 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPublicDCFiles.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPublicDCFiles.java
@@ -59,6 +59,7 @@ public void testGenerateAndEmulationOfMultipleHDFSDCFiles()
final String [] otherArgs = {
"-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
@@ -81,6 +82,7 @@ public void testGridmixEmulationOfMulitpleHDFSPublicDCFile()
tracePath};
final String [] otherArgs = {
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath,
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java
index c48a7461d3fd2..ec11a2b36e66c 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java
@@ -55,6 +55,7 @@ public void testGridmixWith10minTrace() throws Exception {
String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_MINIMUM_FILE_SIZE + "=" + minFileSize,
"-D", GridMixConfig.GRIDMIX_JOB_SUBMISSION_QUEUE_IN_TRACE + "=false",
"-D", GridMixConfig.GRIDMIX_SLEEPJOB_MAPTASK_ONLY + "=true",
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java
index ec2a1377bd6cb..9bcb45a3fbb6c 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java
@@ -51,6 +51,7 @@ public void testGridmixWith12minTrace() throws Exception {
String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_SLEEP_MAP_MAX_TIME + "=10",
"-D", GridMixConfig.GRIDMIX_SLEEP_REDUCE_MAX_TIME + "=5"
};
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java
index ed2648448fa5a..c583e6d3a29fc 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java
@@ -49,6 +49,7 @@ public void testGridmixWith1minTrace() throws Exception {
String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith2minStreamingJobTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith2minStreamingJobTrace.java
index 9628dd2db8d6c..d9fb7c70f7f84 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith2minStreamingJobTrace.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith2minStreamingJobTrace.java
@@ -56,6 +56,7 @@ public void testGridmixWith2minStreamJobTrace() throws Exception {
"-D", GridMixConfig.GRIDMIX_JOB_SUBMISSION_QUEUE_IN_TRACE + "=true",
"-D", GridMixConfig.GRIDMIX_MINIMUM_FILE_SIZE + "=" + minFileSize,
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
runGridmixAndVerify(runtimeValues, otherArgs, tracePath);
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minStreamingJobTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minStreamingJobTrace.java
index 926f795b747df..85dedf6675f96 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minStreamingJobTrace.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minStreamingJobTrace.java
@@ -59,6 +59,7 @@ public void testGridmixWith3minStreamJobTrace() throws Exception {
"-D", GridMixConfig.GRIDMIX_JOB_SUBMISSION_QUEUE_IN_TRACE + "=true",
"-D", GridMixConfig.GRIDMIX_BYTES_PER_FILE + "=" + bytesPerFile,
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java
index bed33d0dd3ebc..5f2171fb40196 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java
@@ -52,6 +52,7 @@ public void testGridmixWith3minTrace() throws Exception {
String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minStreamingJobTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minStreamingJobTrace.java
index 370f120aa961a..ef1878c0855b5 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minStreamingJobTrace.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minStreamingJobTrace.java
@@ -56,6 +56,7 @@ public void testGridmixWith5minStreamJobTrace() throws Exception {
"-D", GridMixConfig.GRIDMIX_KEY_FRC + "=0.5f",
"-D", GridMixConfig.GRIDMIX_BYTES_PER_FILE + "=" + bytesPerFile,
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false"
};
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java
index 5a141d4c8153d..c55167e3b4f51 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java
@@ -51,6 +51,7 @@ public void testGridmixWith5minTrace() throws Exception {
String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_MINIMUM_FILE_SIZE + "=" + minFileSize
};
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java
index 0791d68aee28b..55be37b17dd89 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java
@@ -51,6 +51,7 @@ public void testGridmixWith7minTrace() throws Exception {
String [] otherArgs = {
"-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false",
+ "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false",
"-D", GridMixConfig.GRIDMIX_MINIMUM_FILE_SIZE + "=" + minFileSize,
"-D", GridMixConfig.GRIDMIX_JOB_SUBMISSION_QUEUE_IN_TRACE + "=false"
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java
index 46872b2418cd4..ae71ec5764bde 100644
--- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java
+++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java
@@ -316,7 +316,9 @@ public JobConf getSimulatedJobConf(JobID simulatedJobID, File tmpJHFolder)
Path jhpath = new Path(historyFilePath);
fs = jhpath.getFileSystem(conf);
fs.copyToLocalFile(jhpath,new Path(tmpJHFolder.toString()));
- fs.copyToLocalFile(new Path(historyFilePath + "_conf.xml"),
+ String historyPath =
+ historyFilePath.substring(0,historyFilePath.lastIndexOf("_"));
+ fs.copyToLocalFile(new Path(historyPath + "_conf.xml"),
new Path(tmpJHFolder.toString()));
JobConf jobConf = new JobConf();
jobConf.addResource(new Path(tmpJHFolder.toString()
diff --git a/mapreduce/src/contrib/gridmix/src/test/system/resources/highram_mr_jobs_case4.json.gz b/mapreduce/src/contrib/gridmix/src/test/system/resources/highram_mr_jobs_case4.json.gz
index c4d4657c3cb7f..229d8d321bc4a 100644
Binary files a/mapreduce/src/contrib/gridmix/src/test/system/resources/highram_mr_jobs_case4.json.gz and b/mapreduce/src/contrib/gridmix/src/test/system/resources/highram_mr_jobs_case4.json.gz differ
|
ecea6e2615d9c1990e40613d5332e1f2d674a5b5
|
orientdb
|
Issue -1404 WAL durability test improvements.--
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/test/java/com/orientechnologies/orient/core/storage/impl/local/paginated/LocalPaginatedStorageRestoreFromWAL.java b/core/src/test/java/com/orientechnologies/orient/core/storage/impl/local/paginated/LocalPaginatedStorageRestoreFromWAL.java
index 65cf3753dbf..e50d40304f2 100755
--- a/core/src/test/java/com/orientechnologies/orient/core/storage/impl/local/paginated/LocalPaginatedStorageRestoreFromWAL.java
+++ b/core/src/test/java/com/orientechnologies/orient/core/storage/impl/local/paginated/LocalPaginatedStorageRestoreFromWAL.java
@@ -76,8 +76,9 @@ public void beforeMethod() {
if (baseDocumentTx.exists()) {
baseDocumentTx.open("admin", "admin");
baseDocumentTx.drop();
- } else
- baseDocumentTx.create();
+ }
+
+ baseDocumentTx.create();
createSchema(baseDocumentTx);
}
@@ -166,69 +167,74 @@ public class DataPropagationTask implements Callable<Void> {
public Void call() throws Exception {
Random random = new Random();
- ODatabaseRecordThreadLocal.INSTANCE.set(baseDocumentTx);
- List<ORID> testTwoList = new ArrayList<ORID>();
- List<ORID> firstDocs = new ArrayList<ORID>();
+ final ODatabaseDocumentTx db = new ODatabaseDocumentTx(baseDocumentTx.getURL());
+ db.open("admin", "admin");
+ try {
+ List<ORID> testTwoList = new ArrayList<ORID>();
+ List<ORID> firstDocs = new ArrayList<ORID>();
- OClass classOne = baseDocumentTx.getMetadata().getSchema().getClass("TestOne");
- OClass classTwo = baseDocumentTx.getMetadata().getSchema().getClass("TestTwo");
+ OClass classOne = db.getMetadata().getSchema().getClass("TestOne");
+ OClass classTwo = db.getMetadata().getSchema().getClass("TestTwo");
- for (int i = 0; i < 1000; i++) {
- ODocument docOne = new ODocument(classOne);
- docOne.field("intProp", random.nextInt());
+ for (int i = 0; i < 1000; i++) {
+ ODocument docOne = new ODocument(classOne);
+ docOne.field("intProp", random.nextInt());
- byte[] stringData = new byte[256];
- random.nextBytes(stringData);
- String stringProp = new String(stringData);
+ byte[] stringData = new byte[256];
+ random.nextBytes(stringData);
+ String stringProp = new String(stringData);
- docOne.field("stringProp", stringProp);
+ docOne.field("stringProp", stringProp);
- Set<String> stringSet = new HashSet<String>();
- for (int n = 0; n < 5; n++) {
- stringSet.add("str" + random.nextInt());
- }
- docOne.field("stringSet", stringSet);
+ Set<String> stringSet = new HashSet<String>();
+ for (int n = 0; n < 5; n++) {
+ stringSet.add("str" + random.nextInt());
+ }
+ docOne.field("stringSet", stringSet);
- docOne.save();
+ docOne.save();
- firstDocs.add(docOne.getIdentity());
+ firstDocs.add(docOne.getIdentity());
- if (random.nextBoolean()) {
- ODocument docTwo = new ODocument(classTwo);
+ if (random.nextBoolean()) {
+ ODocument docTwo = new ODocument(classTwo);
- List<String> stringList = new ArrayList<String>();
+ List<String> stringList = new ArrayList<String>();
- for (int n = 0; n < 5; n++) {
- stringList.add("strnd" + random.nextInt());
- }
+ for (int n = 0; n < 5; n++) {
+ stringList.add("strnd" + random.nextInt());
+ }
- docTwo.field("stringList", stringList);
- docTwo.save();
+ docTwo.field("stringList", stringList);
+ docTwo.save();
- testTwoList.add(docTwo.getIdentity());
- }
+ testTwoList.add(docTwo.getIdentity());
+ }
- if (!testTwoList.isEmpty()) {
- int startIndex = random.nextInt(testTwoList.size());
- int endIndex = random.nextInt(testTwoList.size() - startIndex) + startIndex;
+ if (!testTwoList.isEmpty()) {
+ int startIndex = random.nextInt(testTwoList.size());
+ int endIndex = random.nextInt(testTwoList.size() - startIndex) + startIndex;
- Map<String, ORID> linkMap = new HashMap<String, ORID>();
+ Map<String, ORID> linkMap = new HashMap<String, ORID>();
- for (int n = startIndex; n < endIndex; n++) {
- ORID docTwoRid = testTwoList.get(n);
- linkMap.put(docTwoRid.toString(), docTwoRid);
- }
+ for (int n = startIndex; n < endIndex; n++) {
+ ORID docTwoRid = testTwoList.get(n);
+ linkMap.put(docTwoRid.toString(), docTwoRid);
+ }
- docOne.field("linkMap", linkMap);
- docOne.save();
- }
+ docOne.field("linkMap", linkMap);
+ docOne.save();
+ }
- boolean deleteDoc = random.nextDouble() <= 0.2;
- if (deleteDoc) {
- ORID rid = firstDocs.remove(random.nextInt(firstDocs.size()));
- baseDocumentTx.delete(rid);
+ boolean deleteDoc = random.nextDouble() <= 0.2;
+ if (deleteDoc) {
+ ORID rid = firstDocs.remove(random.nextInt(firstDocs.size()));
+ db.delete(rid);
+ }
}
+ } finally {
+ db.close();
}
return null;
|
21f80973533b8c64032d864d8299bcb05f54445e
|
hbase
|
HBASE-2040 Fixes to group commit--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@889775 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 1396c5306bca..04cb8e73780e 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -123,6 +123,7 @@ Release 0.21.0 - Unreleased
empty oldlogfile.log (Lars George via Stack)
HBASE-2022 NPE in housekeeping kills RS
HBASE-2033 Shell scan 'limit' is off by one
+ HBASE-2040 Fixes to group commit
IMPROVEMENTS
HBASE-1760 Cleanup TODOs in HTable
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java b/src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
index 7ba395d51aa4..a17baaeb3e94 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
@@ -237,7 +237,7 @@ public HLog(final FileSystem fs, final Path dir, final HBaseConfiguration conf,
", flushlogentries=" + this.flushlogentries +
", optionallogflushinternal=" + this.optionalFlushInterval + "ms");
rollWriter();
- logSyncerThread = new LogSyncer(this.flushlogentries);
+ logSyncerThread = new LogSyncer(this.optionalFlushInterval);
Threads.setDaemonThreadRunning(logSyncerThread,
Thread.currentThread().getName() + ".logSyncer");
}
@@ -726,9 +726,9 @@ class LogSyncer extends Thread {
// Condition used to signal that the sync is done
private final Condition syncDone = lock.newCondition();
- private final int optionalFlushInterval;
+ private final long optionalFlushInterval;
- LogSyncer(int optionalFlushInterval) {
+ LogSyncer(long optionalFlushInterval) {
this.optionalFlushInterval = optionalFlushInterval;
}
@@ -739,7 +739,12 @@ public void run() {
// Wait until something has to be synced or do it if we waited enough
// time (useful if something appends but does not sync).
- queueEmpty.await(this.optionalFlushInterval, TimeUnit.MILLISECONDS);
+ if (!queueEmpty.await(this.optionalFlushInterval,
+ TimeUnit.MILLISECONDS)) {
+ forceSync = true;
+ }
+
+
// We got the signal, let's syncFS. We currently own the lock so new
// writes are waiting to acquire it in addToSyncQueue while the ones
|
9a5abf368f2c5ef996b8cb7185719bcb78fe2b36
|
kotlin
|
Constraint incorporation--In a constraint system a new bound is incorporated:-all new constrains that can be derived from it-(and from existing ones) are added-
|
a
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/Errors.java b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/Errors.java
index 078556686d8ca..f337d9fe35da3 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/Errors.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/Errors.java
@@ -459,6 +459,7 @@ enum BadNamedArgumentsTarget {
DiagnosticFactory1<PsiElement, InferenceErrorData> TYPE_INFERENCE_CONFLICTING_SUBSTITUTIONS = DiagnosticFactory1.create(ERROR);
DiagnosticFactory1<PsiElement, InferenceErrorData> TYPE_INFERENCE_CANNOT_CAPTURE_TYPES = DiagnosticFactory1.create(ERROR);
DiagnosticFactory1<PsiElement, InferenceErrorData> TYPE_INFERENCE_TYPE_CONSTRUCTOR_MISMATCH = DiagnosticFactory1.create(ERROR);
+ DiagnosticFactory0<PsiElement> TYPE_INFERENCE_INCORPORATION_ERROR = DiagnosticFactory0.create(ERROR);
DiagnosticFactory1<PsiElement, InferenceErrorData> TYPE_INFERENCE_UPPER_BOUND_VIOLATED = DiagnosticFactory1.create(ERROR);
DiagnosticFactory2<JetExpression, JetType, JetType> TYPE_INFERENCE_EXPECTED_TYPE_MISMATCH = DiagnosticFactory2.create(ERROR);
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DefaultErrorMessages.java b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DefaultErrorMessages.java
index 5eed0c12740ba..b1ae845790f72 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DefaultErrorMessages.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DefaultErrorMessages.java
@@ -593,6 +593,7 @@ public String render(@NotNull Boolean hasValueParameters) {
MAP.put(TYPE_INFERENCE_CANNOT_CAPTURE_TYPES, "Type inference failed: {0}", TYPE_INFERENCE_CANNOT_CAPTURE_TYPES_RENDERER);
MAP.put(TYPE_INFERENCE_NO_INFORMATION_FOR_PARAMETER, "Type inference failed: {0}", TYPE_INFERENCE_NO_INFORMATION_FOR_PARAMETER_RENDERER);
MAP.put(TYPE_INFERENCE_TYPE_CONSTRUCTOR_MISMATCH, "Type inference failed: {0}", TYPE_INFERENCE_TYPE_CONSTRUCTOR_MISMATCH_RENDERER);
+ MAP.put(TYPE_INFERENCE_INCORPORATION_ERROR, "Type inference failed. Please try to specify type arguments explicitly.");
MAP.put(TYPE_INFERENCE_UPPER_BOUND_VIOLATED, "{0}", TYPE_INFERENCE_UPPER_BOUND_VIOLATED_RENDERER);
MAP.put(TYPE_INFERENCE_EXPECTED_TYPE_MISMATCH, "Type inference failed. Expected type mismatch: found: {1} required: {0}", RENDER_TYPE, RENDER_TYPE);
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/Renderers.kt b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/Renderers.kt
index f08c04f856b7e..2a1061b0c49d8 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/Renderers.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/Renderers.kt
@@ -228,7 +228,7 @@ public object Renderers {
return result
.text(newText().normal("Not enough information to infer parameter ")
- .strong(firstUnknownParameter!!.getName())
+ .strong(firstUnknownParameter.getName())
.normal(" in "))
.table(newTable()
.descriptor(inferenceErrorData.descriptor)
@@ -246,7 +246,7 @@ public object Renderers {
val systemWithoutWeakConstraints = constraintSystem.getSystemWithoutWeakConstraints()
val typeParameterDescriptor = inferenceErrorData.descriptor.getTypeParameters().firstOrNull {
- !ConstraintsUtil.checkUpperBoundIsSatisfied(systemWithoutWeakConstraints, it, true)
+ !ConstraintsUtil.checkUpperBoundIsSatisfied(systemWithoutWeakConstraints, it, true)
}
if (typeParameterDescriptor == null && status.hasConflictingConstraints()) {
return renderConflictingSubstitutionsInferenceError(inferenceErrorData, result)
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/ArgumentTypeResolver.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/ArgumentTypeResolver.java
index e3ad15b08efcb..6d57f4c0e687a 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/ArgumentTypeResolver.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/ArgumentTypeResolver.java
@@ -55,7 +55,7 @@
import static org.jetbrains.kotlin.resolve.calls.CallResolverUtil.ResolveArgumentsMode.SHAPE_FUNCTION_ARGUMENTS;
import static org.jetbrains.kotlin.resolve.calls.context.ContextDependency.DEPENDENT;
import static org.jetbrains.kotlin.resolve.calls.context.ContextDependency.INDEPENDENT;
-import static org.jetbrains.kotlin.resolve.calls.inference.InferencePackage.createCorrespondingFunctionTypeForFunctionPlaceholder;
+import static org.jetbrains.kotlin.resolve.calls.inference.InferencePackage.createTypeForFunctionPlaceholder;
import static org.jetbrains.kotlin.types.TypeUtils.DONT_CARE;
import static org.jetbrains.kotlin.types.TypeUtils.NO_EXPECTED_TYPE;
@@ -85,7 +85,7 @@ public static boolean isSubtypeOfForArgumentType(
@NotNull JetType expectedType
) {
if (ErrorUtils.isFunctionPlaceholder(actualType)) {
- JetType functionType = createCorrespondingFunctionTypeForFunctionPlaceholder(actualType, expectedType);
+ JetType functionType = createTypeForFunctionPlaceholder(actualType, expectedType);
return JetTypeChecker.DEFAULT.isSubtypeOf(functionType, expectedType);
}
return JetTypeChecker.DEFAULT.isSubtypeOf(actualType, expectedType);
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/CallCompleter.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/CallCompleter.kt
index 10f4721271228..1a0700d81190f 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/CallCompleter.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/CallCompleter.kt
@@ -44,6 +44,9 @@ import org.jetbrains.kotlin.types.TypeUtils
import org.jetbrains.kotlin.types.expressions.DataFlowUtils
import org.jetbrains.kotlin.types.expressions.ExpressionTypingUtils
import java.util.ArrayList
+import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPositionKind.*
+import org.jetbrains.kotlin.resolve.calls.model.*
+import org.jetbrains.kotlin.types.ErrorUtils
public class CallCompleter(
val argumentTypeResolver: ArgumentTypeResolver,
@@ -151,8 +154,6 @@ public class CallCompleter(
}
}
- (getConstraintSystem() as ConstraintSystemImpl).processDeclaredBoundConstraints()
-
if (returnType != null && expectedType === TypeUtils.UNIT_EXPECTED_TYPE) {
updateSystemIfSuccessful {
system ->
@@ -161,6 +162,9 @@ public class CallCompleter(
}
}
+ val constraintSystem = getConstraintSystem() as ConstraintSystemImpl
+ constraintSystem.fixVariables()
+
setResultingSubstitutor(getConstraintSystem()!!.getResultingSubstitutor())
}
@@ -280,7 +284,8 @@ public class CallCompleter(
argumentExpression: JetExpression,
trace: BindingTrace
): JetType? {
- if (recordedType == updatedType || updatedType == null) return updatedType
+ //workaround for KT-8218
+ if ((!ErrorUtils.containsErrorType(recordedType) && recordedType == updatedType) || updatedType == null) return updatedType
fun deparenthesizeOrGetSelector(expression: JetExpression?): JetExpression? {
val deparenthesized = JetPsiUtil.deparenthesizeOnce(expression, /* deparenthesizeBinaryExpressionWithTypeRHS = */ false)
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/GenericCandidateResolver.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/GenericCandidateResolver.kt
index 86b56658ea0dd..b784dcb995323 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/GenericCandidateResolver.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/GenericCandidateResolver.kt
@@ -70,11 +70,9 @@ class GenericCandidateResolver(
// Thus, we replace the parameters of our descriptor with fresh objects (perform alpha-conversion)
val candidateWithFreshVariables = FunctionDescriptorUtil.alphaConvertTypeParameters(candidate)
- val typeVariables = Maps.newLinkedHashMap<TypeParameterDescriptor, Variance>()
- for (typeParameterDescriptor in candidateWithFreshVariables.getTypeParameters()) {
- typeVariables.put(typeParameterDescriptor, Variance.INVARIANT) // TODO: variance of the occurrences
- }
- constraintSystem.registerTypeVariables(typeVariables)
+ val backConversion = candidateWithFreshVariables.getTypeParameters().zip(candidate.getTypeParameters()).toMap()
+
+ constraintSystem.registerTypeVariables(candidateWithFreshVariables.getTypeParameters(), { Variance.INVARIANT })
val substituteDontCare = makeConstantSubstitutor(candidateWithFreshVariables.getTypeParameters(), DONT_CARE)
@@ -111,9 +109,7 @@ class GenericCandidateResolver(
}
// Restore type variables before alpha-conversion
- val constraintSystemWithRightTypeParameters = constraintSystem.substituteTypeVariables {
- candidate.getTypeParameters().get(it.getIndex())
- }
+ val constraintSystemWithRightTypeParameters = constraintSystem.substituteTypeVariables { backConversion.get(it) }
candidateCall.setConstraintSystem(constraintSystemWithRightTypeParameters)
@@ -174,7 +170,7 @@ class GenericCandidateResolver(
val valueParameterDescriptor = entry.getKey()
for (valueArgument in resolvedValueArgument.getArguments()) {
- addConstraintForFunctionLiteral<D>(valueArgument, valueParameterDescriptor, constraintSystem, context)
+ addConstraintForFunctionLiteral(valueArgument, valueParameterDescriptor, constraintSystem, context)
}
}
resolvedCall.setResultingSubstitutor(constraintSystem.getResultingSubstitutor())
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/tasks/AbstractTracingStrategy.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/tasks/AbstractTracingStrategy.java
index a3f1ceaf1f65a..69f1fec7b059b 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/tasks/AbstractTracingStrategy.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/tasks/AbstractTracingStrategy.java
@@ -233,6 +233,9 @@ else if (status.hasTypeConstructorMismatch()) {
else if (status.hasConflictingConstraints()) {
trace.report(TYPE_INFERENCE_CONFLICTING_SUBSTITUTIONS.on(reference, data));
}
+ else if (status.hasTypeInferenceIncorporationError()) {
+ trace.report(TYPE_INFERENCE_INCORPORATION_ERROR.on(reference));
+ }
else {
assert status.hasUnknownParameters();
trace.report(TYPE_INFERENCE_NO_INFORMATION_FOR_PARAMETER.on(reference, data));
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/tasks/ResolutionTaskHolder.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/tasks/ResolutionTaskHolder.kt
index 13e4117743c1a..edafbdb4cd476 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/tasks/ResolutionTaskHolder.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/tasks/ResolutionTaskHolder.kt
@@ -58,7 +58,7 @@ public class ResolutionTaskHolder<D : CallableDescriptor, F : D>(
val lazyCandidates = {
candidatesList[candidateIndex]().filter { priorityProvider.getPriority(it) == priority }.toReadOnlyList()
}
- tasks.add(ResolutionTask(basicCallResolutionContext, tracing, lazyCandidates))
+ tasks.add(ResolutionTask<D, F>(basicCallResolutionContext, tracing, lazyCandidates))
}
}
diff --git a/compiler/testData/diagnostics/tests/delegatedProperty/inference/extensionProperty.kt b/compiler/testData/diagnostics/tests/delegatedProperty/inference/extensionProperty.kt
index ad3d058475bfd..6e41832302109 100644
--- a/compiler/testData/diagnostics/tests/delegatedProperty/inference/extensionProperty.kt
+++ b/compiler/testData/diagnostics/tests/delegatedProperty/inference/extensionProperty.kt
@@ -1,10 +1,10 @@
package foo
open class A {
- val B.w: Int by <!TYPE_INFERENCE_UPPER_BOUND_VIOLATED!>MyProperty<!>()
+ val B.w: Int by <!TYPE_INFERENCE_NO_INFORMATION_FOR_PARAMETER!>MyProperty<!>()
}
-val B.r: Int by <!TYPE_INFERENCE_UPPER_BOUND_VIOLATED!>MyProperty<!>()
+val B.r: Int by <!TYPE_INFERENCE_NO_INFORMATION_FOR_PARAMETER!>MyProperty<!>()
val A.e: Int by MyProperty()
diff --git a/compiler/testData/diagnostics/tests/inference/upperBounds/conflictingSubstitutionsFromUpperBound.kt b/compiler/testData/diagnostics/tests/inference/upperBounds/conflictingSubstitutionsFromUpperBound.kt
index 091243b2b0756..062a40d549dbd 100644
--- a/compiler/testData/diagnostics/tests/inference/upperBounds/conflictingSubstitutionsFromUpperBound.kt
+++ b/compiler/testData/diagnostics/tests/inference/upperBounds/conflictingSubstitutionsFromUpperBound.kt
@@ -7,6 +7,6 @@ fun <T, C: Collection<T>> convert(src: Collection<T>, dest: C): C = throw Except
fun test(l: List<Int>) {
//todo should be inferred
- val r = <!TYPE_INFERENCE_UPPER_BOUND_VIOLATED!>convert<!>(l, <!TYPE_INFERENCE_NO_INFORMATION_FOR_PARAMETER!>HashSet<!>())
- checkSubtype<Int>(<!DEBUG_INFO_ELEMENT_WITH_ERROR_TYPE!>r<!>)
+ val r = convert(l, HashSet())
+ checkSubtype<Collection<Int>>(r)
}
diff --git a/compiler/testData/diagnostics/tests/inference/upperBounds/doNotInferFromBoundsOnly.kt b/compiler/testData/diagnostics/tests/inference/upperBounds/doNotInferFromBoundsOnly.kt
index 1e5cd3be0e35a..775f12ec20ced 100644
--- a/compiler/testData/diagnostics/tests/inference/upperBounds/doNotInferFromBoundsOnly.kt
+++ b/compiler/testData/diagnostics/tests/inference/upperBounds/doNotInferFromBoundsOnly.kt
@@ -29,8 +29,7 @@ fun test3() {
fun <T, R: T> emptyStrangeMap1(t: T): Map<T, R> = throw Exception("$t")
fun test4() {
- //todo we may infer 'Int' for 'R' here
- <!TYPE_INFERENCE_NO_INFORMATION_FOR_PARAMETER!>emptyStrangeMap1<!>(1)
+ emptyStrangeMap1(1)
}
//--------------
@@ -38,8 +37,7 @@ fun test4() {
fun <T: A, R: T> emptyStrangeMap2(t: T): Map<T, R> where R: A = throw Exception("$t")
fun test5(a: A) {
- //todo we may infer 'A' for 'R' here
- <!TYPE_INFERENCE_NO_INFORMATION_FOR_PARAMETER!>emptyStrangeMap2<!>(a)
+ emptyStrangeMap2(a)
}
//--------------
diff --git a/compiler/testData/diagnostics/tests/platformTypes/inference.kt b/compiler/testData/diagnostics/tests/platformTypes/inference.kt
index a5fc0a7ff4d28..bc322da00d167 100644
--- a/compiler/testData/diagnostics/tests/platformTypes/inference.kt
+++ b/compiler/testData/diagnostics/tests/platformTypes/inference.kt
@@ -16,11 +16,10 @@ public class HS<T> extends Base<T> {}
import foo.*;
-import java.util.HashSet
fun <T, C: Base<T>> convert(src: HS<T>, dest: C): C = throw Exception("$src $dest")
fun test(l: HS<Int>) {
//todo should be inferred
- val r = <!TYPE_INFERENCE_UPPER_BOUND_VIOLATED!>convert<!>(l, <!TYPE_INFERENCE_NO_INFORMATION_FOR_PARAMETER!>HS<!>())
- checkSubtype<Int>(<!DEBUG_INFO_ELEMENT_WITH_ERROR_TYPE!>r<!>)
+ val r = convert(l, HS())
+ checkSubtype<Base<Int>>(r)
}
\ No newline at end of file
diff --git a/compiler/testData/diagnostics/tests/regressions/itselfAsUpperBoundLocal.kt b/compiler/testData/diagnostics/tests/regressions/itselfAsUpperBoundLocal.kt
index 353988510c342..35af733335afb 100644
--- a/compiler/testData/diagnostics/tests/regressions/itselfAsUpperBoundLocal.kt
+++ b/compiler/testData/diagnostics/tests/regressions/itselfAsUpperBoundLocal.kt
@@ -1,4 +1,4 @@
fun bar() {
fun <T: <!CYCLIC_GENERIC_UPPER_BOUND!>T?<!>> foo() {}
- foo()
+ <!TYPE_INFERENCE_NO_INFORMATION_FOR_PARAMETER!>foo<!>()
}
diff --git a/compiler/testData/diagnostics/tests/resolve/nestedCalls/kt7597.kt b/compiler/testData/diagnostics/tests/resolve/nestedCalls/kt7597.kt
index 9d8aac8a0833f..9e8cb9218b1de 100644
--- a/compiler/testData/diagnostics/tests/resolve/nestedCalls/kt7597.kt
+++ b/compiler/testData/diagnostics/tests/resolve/nestedCalls/kt7597.kt
@@ -1,10 +1,8 @@
-// !CHECK_TYPE
-
interface Inv<I>
fun <S, T: S> Inv<T>.reduce2(): S = null!!
fun test(a: Inv<Int>): Int {
- val b = 1 <!OVERLOAD_RESOLUTION_AMBIGUITY!>+<!> a.reduce2()
- return <!DEBUG_INFO_ELEMENT_WITH_ERROR_TYPE!>b<!>
+ val b = 1 + a.reduce2()
+ return b
}
\ No newline at end of file
diff --git a/compiler/testData/diagnostics/tests/resolve/nestedCalls/twoTypeParameters.kt b/compiler/testData/diagnostics/tests/resolve/nestedCalls/twoTypeParameters.kt
index ab81fa1efc1fd..298efdc4e3dc2 100644
--- a/compiler/testData/diagnostics/tests/resolve/nestedCalls/twoTypeParameters.kt
+++ b/compiler/testData/diagnostics/tests/resolve/nestedCalls/twoTypeParameters.kt
@@ -6,7 +6,12 @@ fun <D, E : D> List<ResolutionTask<D, E>>.bar(t: ResolutionTask<D, E>) = t
public class ResolutionTaskHolder<F, G : F> {
fun test(candidate: ResolutionCandidate<F>, tasks: MutableList<ResolutionTask<F, G>>) {
- tasks.bar(ResolutionTask(candidate))
+ tasks.bar(ResolutionTask<F, G>(candidate))
+ tasks.add(ResolutionTask<F, G>(candidate))
+
+ //todo the problem is the type of ResolutionTask is inferred as ResolutionTask<F, F> too early
+ tasks.<!TYPE_INFERENCE_CONFLICTING_SUBSTITUTIONS!>bar<!>(ResolutionTask(candidate))
+ tasks.<!NONE_APPLICABLE!>add<!>(ResolutionTask(candidate))
}
}
diff --git a/compiler/testData/diagnostics/testsWithStdLib/resolve/kt4711.kt b/compiler/testData/diagnostics/testsWithStdLib/resolve/kt4711.kt
index 0a14b59baa40f..1d783be79c87b 100644
--- a/compiler/testData/diagnostics/testsWithStdLib/resolve/kt4711.kt
+++ b/compiler/testData/diagnostics/testsWithStdLib/resolve/kt4711.kt
@@ -6,10 +6,10 @@ fun main(args:Array<String>) {
val startTimeNanos = System.nanoTime()
// the problem sits on the next line:
- val pi = 4.0.toDouble() * delta <!OVERLOAD_RESOLUTION_AMBIGUITY!>*<!> (1..n).reduce(
+ val pi = 4.0.toDouble() * delta <!OVERLOAD_RESOLUTION_AMBIGUITY!>*<!> (1..n).<!TYPE_INFERENCE_CONFLICTING_SUBSTITUTIONS!>reduce<!>(
{t, i ->
val x = (i - 0.5) * delta
- <!TYPE_MISMATCH!>t + 1.0 / (1.0 + x * x)<!>
+ t + 1.0 / (1.0 + x * x)
})
// !!! pi has error type here
diff --git a/compiler/testData/resolvedCalls/arguments/realExamples/emptyList.txt b/compiler/testData/resolvedCalls/arguments/realExamples/emptyList.txt
index 24203fd1e26b6..2ae11394112a7 100644
--- a/compiler/testData/resolvedCalls/arguments/realExamples/emptyList.txt
+++ b/compiler/testData/resolvedCalls/arguments/realExamples/emptyList.txt
@@ -12,7 +12,7 @@ fun bar() {
Resolved call:
Candidate descriptor: fun <T> foo(t: T): Unit defined in root package
-Resulting descriptor: fun <T> foo(t: List<???>): Unit defined in root package
+Resulting descriptor: fun <T> foo(t: ???): Unit defined in root package
Explicit receiver kind = NO_EXPLICIT_RECEIVER
Dispatch receiver = NO_RECEIVER
@@ -20,4 +20,4 @@ Extension receiver = NO_RECEIVER
Value arguments mapping:
-MATCH_MODULO_UNINFERRED_TYPES t : List<???> = someList()
+MATCH_MODULO_UNINFERRED_TYPES t : ??? = someList()
diff --git a/compiler/testData/resolvedCalls/arguments/realExamples/emptyMutableList.txt b/compiler/testData/resolvedCalls/arguments/realExamples/emptyMutableList.txt
index 6537d71ccba48..55ee516d0d439 100644
--- a/compiler/testData/resolvedCalls/arguments/realExamples/emptyMutableList.txt
+++ b/compiler/testData/resolvedCalls/arguments/realExamples/emptyMutableList.txt
@@ -12,7 +12,7 @@ fun bar() {
Resolved call:
Candidate descriptor: fun <T> foo(t: T): Unit defined in root package
-Resulting descriptor: fun <T> foo(t: MutableList<???>): Unit defined in root package
+Resulting descriptor: fun <T> foo(t: ???): Unit defined in root package
Explicit receiver kind = NO_EXPLICIT_RECEIVER
Dispatch receiver = NO_RECEIVER
@@ -20,4 +20,4 @@ Extension receiver = NO_RECEIVER
Value arguments mapping:
-MATCH_MODULO_UNINFERRED_TYPES t : MutableList<???> = someList()
+MATCH_MODULO_UNINFERRED_TYPES t : ??? = someList()
diff --git a/compiler/tests/org/jetbrains/kotlin/resolve/constraintSystem/AbstractConstraintSystemTest.kt b/compiler/tests/org/jetbrains/kotlin/resolve/constraintSystem/AbstractConstraintSystemTest.kt
index 1cffd813aba97..e676fad242859 100644
--- a/compiler/tests/org/jetbrains/kotlin/resolve/constraintSystem/AbstractConstraintSystemTest.kt
+++ b/compiler/tests/org/jetbrains/kotlin/resolve/constraintSystem/AbstractConstraintSystemTest.kt
@@ -36,9 +36,10 @@ import java.util.LinkedHashMap
import java.util.regex.Pattern
abstract public class AbstractConstraintSystemTest() : JetLiteFixture() {
- private val typePattern = """([\w|<|>|\(|\)]+)"""
- val constraintPattern = Pattern.compile("""(SUBTYPE|SUPERTYPE)\s+$typePattern\s+$typePattern\s*(weak)?""")
+ private val typePattern = """([\w|<|\,|>|?|\(|\)]+)"""
+ val constraintPattern = Pattern.compile("""(SUBTYPE|SUPERTYPE|EQUAL)\s+$typePattern\s+$typePattern\s*(weak)?""")
val variablesPattern = Pattern.compile("VARIABLES\\s+(.*)")
+ val fixVariablesPattern = "FIX_VARIABLES"
private var _typeResolver: TypeResolver? = null
private val typeResolver: TypeResolver
@@ -83,12 +84,10 @@ abstract public class AbstractConstraintSystemTest() : JetLiteFixture() {
val constraintSystem = ConstraintSystemImpl()
- val typeParameterDescriptors = LinkedHashMap<TypeParameterDescriptor, Variance>()
val variables = parseVariables(constraintsFileText)
- for (variable in variables) {
- typeParameterDescriptors.put(testDeclarations.getParameterDescriptor(variable), Variance.INVARIANT)
- }
- constraintSystem.registerTypeVariables(typeParameterDescriptors)
+ val fixVariables = constraintsFileText.contains(fixVariablesPattern)
+ val typeParameterDescriptors = variables.map { testDeclarations.getParameterDescriptor(it) }
+ constraintSystem.registerTypeVariables(typeParameterDescriptors, { Variance.INVARIANT })
val constraints = parseConstraints(constraintsFileText)
for (constraint in constraints) {
@@ -98,17 +97,18 @@ abstract public class AbstractConstraintSystemTest() : JetLiteFixture() {
when (constraint.kind) {
MyConstraintKind.SUBTYPE -> constraintSystem.addSubtypeConstraint(firstType, secondType, position)
MyConstraintKind.SUPERTYPE -> constraintSystem.addSupertypeConstraint(firstType, secondType, position)
+ MyConstraintKind.EQUAL -> constraintSystem.addConstraint(ConstraintSystemImpl.ConstraintKind.EQUAL, firstType, secondType, position)
}
}
- constraintSystem.processDeclaredBoundConstraints()
+ if (fixVariables) constraintSystem.fixVariables()
val resultingStatus = Renderers.RENDER_CONSTRAINT_SYSTEM_SHORT.render(constraintSystem)
val resultingSubstitutor = constraintSystem.getResultingSubstitutor()
val result = StringBuilder() append "result:\n"
- for ((typeParameter, variance) in typeParameterDescriptors) {
+ for (typeParameter in typeParameterDescriptors) {
val parameterType = testDeclarations.getType(typeParameter.getName().asString())
- val resultType = resultingSubstitutor.substitute(parameterType, variance)
+ val resultType = resultingSubstitutor.substitute(parameterType, Variance.INVARIANT)
result append "${typeParameter.getName()}=${resultType?.let{ DescriptorRenderer.SHORT_NAMES_IN_TYPES.renderType(it) }}\n"
}
@@ -118,7 +118,7 @@ abstract public class AbstractConstraintSystemTest() : JetLiteFixture() {
class MyConstraint(val kind: MyConstraintKind, val firstType: String, val secondType: String, val isWeak: Boolean)
enum class MyConstraintKind {
- SUBTYPE, SUPERTYPE
+ SUBTYPE, SUPERTYPE, EQUAL
}
private fun parseVariables(text: String): List<String> {
diff --git a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintError.kt b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintError.kt
index 5818c150f54ff..e3b983652221a 100644
--- a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintError.kt
+++ b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintError.kt
@@ -16,8 +16,9 @@
package org.jetbrains.kotlin.resolve.calls.inference
-import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPosition
import org.jetbrains.kotlin.descriptors.TypeParameterDescriptor
+import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.CompoundConstraintPosition
+import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPosition
open class ConstraintError(val constraintPosition: ConstraintPosition)
@@ -25,7 +26,12 @@ class TypeConstructorMismatch(constraintPosition: ConstraintPosition): Constrain
class ErrorInConstrainingType(constraintPosition: ConstraintPosition): ConstraintError(constraintPosition)
+class TypeInferenceError(constraintPosition: ConstraintPosition): ConstraintError(constraintPosition)
+
class CannotCapture(constraintPosition: ConstraintPosition, val typeVariable: TypeParameterDescriptor): ConstraintError(constraintPosition)
-fun ConstraintError.substituteTypeVariable(substitution: (TypeParameterDescriptor) -> TypeParameterDescriptor) =
- if (this is CannotCapture) CannotCapture(constraintPosition, substitution(typeVariable)) else this
\ No newline at end of file
+fun ConstraintError.substituteTypeVariable(substitution: (TypeParameterDescriptor) -> TypeParameterDescriptor?) =
+ if (this is CannotCapture) CannotCapture(constraintPosition, substitution(typeVariable) ?: typeVariable) else this
+
+fun newTypeInferenceOrConstructorMismatchError(constraintPosition: ConstraintPosition) =
+ if (constraintPosition is CompoundConstraintPosition) TypeInferenceError(constraintPosition) else TypeConstructorMismatch(constraintPosition)
\ No newline at end of file
diff --git a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintPosition.kt b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintPosition.kt
index 169096171a0e5..ca4b957f4529c 100644
--- a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintPosition.kt
+++ b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintPosition.kt
@@ -17,6 +17,7 @@
package org.jetbrains.kotlin.resolve.calls.inference.constraintPosition
import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPositionKind.*
+import java.util.*
public enum class ConstraintPositionKind {
RECEIVER_POSITION,
@@ -56,9 +57,14 @@ private data class ConstraintPositionWithIndex(override val kind: ConstraintPosi
class CompoundConstraintPosition(
vararg positions: ConstraintPosition
) : ConstraintPositionImpl(ConstraintPositionKind.COMPOUND_CONSTRAINT_POSITION) {
- val positions: Collection<ConstraintPosition> = positions.toList()
+ val positions: Collection<ConstraintPosition> =
+ positions.flatMap { if (it is CompoundConstraintPosition) it.positions else listOf(it) }.toCollection(LinkedHashSet<ConstraintPosition>())
override fun isStrong() = positions.any { it.isStrong() }
- override fun toString() = "$kind(${positions.joinToString()}"
+ override fun toString() = "$kind(${positions.joinToString()})"
}
+
+fun ConstraintPosition.equalsOrContains(position: ConstraintPosition): Boolean {
+ return if (this !is CompoundConstraintPosition) this == position else positions.any { it == position }
+}
\ No newline at end of file
diff --git a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystem.kt b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystem.kt
index 8a41660f50de7..3e18a4c3b7f2a 100644
--- a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystem.kt
+++ b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystem.kt
@@ -26,11 +26,16 @@ public trait ConstraintSystem {
/**
* Registers variables in a constraint system.
+ * The type variables for the corresponding function are local, the type variables of inner arguments calls are non-local.
*/
- public fun registerTypeVariables(typeVariables: Map<TypeParameterDescriptor, Variance>)
+ public fun registerTypeVariables(
+ typeVariables: Collection<TypeParameterDescriptor>,
+ typeVariableVariance: (TypeParameterDescriptor) -> Variance,
+ external: Boolean = false
+ )
/**
- * Returns a set of all registered type variables.
+ * Returns a set of all non-external registered type variables.
*/
public fun getTypeVariables(): Set<TypeParameterDescriptor>
diff --git a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystemImpl.kt b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystemImpl.kt
index 968451918b45b..4e620384132d9 100644
--- a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystemImpl.kt
+++ b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystemImpl.kt
@@ -16,35 +16,31 @@
package org.jetbrains.kotlin.resolve.calls.inference
-import org.jetbrains.kotlin.descriptors.TypeParameterDescriptor
-import org.jetbrains.kotlin.types.TypeProjection
-import org.jetbrains.kotlin.types.JetType
-import org.jetbrains.kotlin.types.TypeUtils
-import org.jetbrains.kotlin.types.TypeUtils.DONT_CARE
-import org.jetbrains.kotlin.types.TypeProjectionImpl
-import org.jetbrains.kotlin.types.TypeSubstitutor
-import org.jetbrains.kotlin.types.ErrorUtils
-import org.jetbrains.kotlin.types.ErrorUtils.FunctionPlaceholderTypeConstructor
-import org.jetbrains.kotlin.types.Variance
import org.jetbrains.kotlin.builtins.KotlinBuiltIns
+import org.jetbrains.kotlin.descriptors.TypeParameterDescriptor
import org.jetbrains.kotlin.descriptors.annotations.Annotations
-import org.jetbrains.kotlin.resolve.calls.inference.ConstraintSystemImpl.ConstraintKind
+import org.jetbrains.kotlin.resolve.calls.inference.ConstraintSystemImpl.ConstraintKind.EQUAL
+import org.jetbrains.kotlin.resolve.calls.inference.ConstraintSystemImpl.ConstraintKind.SUB_TYPE
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.Bound
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.EXACT_BOUND
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.LOWER_BOUND
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.UPPER_BOUND
+import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.CompoundConstraintPosition
+import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPosition
+import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPositionKind
+import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPositionKind.TYPE_BOUND_POSITION
+import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.equalsOrContains
+import org.jetbrains.kotlin.resolve.scopes.JetScope
+import org.jetbrains.kotlin.types.*
+import org.jetbrains.kotlin.types.ErrorUtils.FunctionPlaceholderTypeConstructor
+import org.jetbrains.kotlin.types.TypeUtils.DONT_CARE
import org.jetbrains.kotlin.types.checker.TypeCheckingProcedure
import org.jetbrains.kotlin.types.checker.TypeCheckingProcedureCallbacks
-import org.jetbrains.kotlin.types.TypeConstructor
-import java.util.LinkedHashMap
-import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.*
-import org.jetbrains.kotlin.resolve.calls.inference.ConstraintSystemImpl.ConstraintKind.*
-import java.util.HashMap
+import org.jetbrains.kotlin.types.typeUtil.getNestedTypeArguments
import java.util.ArrayList
-import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPosition
-import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPositionKind.*
-import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.CompoundConstraintPosition
-import org.jetbrains.kotlin.types.getCustomTypeVariable
-import org.jetbrains.kotlin.types.isFlexible
-import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.Bound
-import org.jetbrains.kotlin.types.TypeSubstitution
-import org.jetbrains.kotlin.types.checker.JetTypeChecker
+import java.util.HashMap
+import java.util.HashSet
+import java.util.LinkedHashMap
public class ConstraintSystemImpl : ConstraintSystem {
@@ -53,7 +49,15 @@ public class ConstraintSystemImpl : ConstraintSystem {
EQUAL
}
- private val typeParameterBounds = LinkedHashMap<TypeParameterDescriptor, TypeBoundsImpl>()
+ fun ConstraintKind.toBound() = if (this == SUB_TYPE) UPPER_BOUND else EXACT_BOUND
+
+ private val allTypeParameterBounds = LinkedHashMap<TypeParameterDescriptor, TypeBoundsImpl>()
+ private val externalTypeParameters = HashSet<TypeParameterDescriptor>()
+ private val typeParameterBounds: Map<TypeParameterDescriptor, TypeBoundsImpl>
+ get() = if (externalTypeParameters.isEmpty()) allTypeParameterBounds
+ else allTypeParameterBounds.filter { !externalTypeParameters.contains(it.key) }
+
+ private val usedInBounds = HashMap<TypeParameterDescriptor, MutableList<TypeBounds.Bound>>()
private val errors = ArrayList<ConstraintError>()
public val constraintErrors: List<ConstraintError>
@@ -64,7 +68,8 @@ public class ConstraintSystemImpl : ConstraintSystem {
override fun isSuccessful() = !hasContradiction() && !hasUnknownParameters()
- override fun hasContradiction() = hasTypeConstructorMismatch() || hasConflictingConstraints() || hasCannotCaptureTypesError()
+ override fun hasContradiction() = hasTypeConstructorMismatch() || hasTypeInferenceIncorporationError()
+ || hasConflictingConstraints() || hasCannotCaptureTypesError()
override fun hasViolatedUpperBound() = !isSuccessful() && getSystemWithoutWeakConstraints().getStatus().isSuccessful()
@@ -77,12 +82,14 @@ public class ConstraintSystemImpl : ConstraintSystem {
override fun hasOnlyErrorsFromPosition(constraintPosition: ConstraintPosition): Boolean {
if (isSuccessful()) return false
if (filterConstraintsOut(constraintPosition).getStatus().isSuccessful()) return true
- return errors.isNotEmpty() && errors.all { it.constraintPosition == constraintPosition }
+ return errors.isNotEmpty() && errors.all { it.constraintPosition.equalsOrContains(constraintPosition) }
}
override fun hasErrorInConstrainingTypes() = errors.any { it is ErrorInConstrainingType }
override fun hasCannotCaptureTypesError() = errors.any { it is CannotCapture }
+
+ override fun hasTypeInferenceIncorporationError() = errors.any { it is TypeInferenceError }
}
private fun getParameterToInferredValueMap(
@@ -105,7 +112,7 @@ public class ConstraintSystemImpl : ConstraintSystem {
}
private fun replaceUninferredBy(getDefaultValue: (TypeParameterDescriptor) -> TypeProjection): TypeSubstitutor {
- return TypeUtils.makeSubstitutorForTypeParametersMap(getParameterToInferredValueMap(typeParameterBounds, getDefaultValue))
+ return TypeUtils.makeSubstitutorForTypeParametersMap(getParameterToInferredValueMap(allTypeParameterBounds, getDefaultValue))
}
private fun replaceUninferredBy(defaultValue: JetType): TypeSubstitutor {
@@ -118,63 +125,78 @@ public class ConstraintSystemImpl : ConstraintSystem {
override fun getStatus(): ConstraintSystemStatus = constraintSystemStatus
- override fun registerTypeVariables(typeVariables: Map<TypeParameterDescriptor, Variance>) {
- for ((typeVariable, positionVariance) in typeVariables) {
- typeParameterBounds.put(typeVariable, TypeBoundsImpl(typeVariable, positionVariance))
+ override fun registerTypeVariables(
+ typeVariables: Collection<TypeParameterDescriptor>,
+ typeVariableVariance: (TypeParameterDescriptor) -> Variance,
+ external: Boolean
+ ) {
+ if (external) externalTypeParameters.addAll(typeVariables)
+
+ for (typeVariable in typeVariables) {
+ allTypeParameterBounds.put(typeVariable, TypeBoundsImpl(typeVariable, typeVariableVariance(typeVariable)))
}
- val constantSubstitutor = TypeUtils.makeConstantSubstitutor(typeParameterBounds.keySet(), DONT_CARE)
- for ((typeVariable, typeBounds) in typeParameterBounds) {
+ for ((typeVariable, typeBounds) in allTypeParameterBounds) {
for (declaredUpperBound in typeVariable.getUpperBounds()) {
if (KotlinBuiltIns.getInstance().getNullableAnyType() == declaredUpperBound) continue //todo remove this line (?)
- val substitutedBound = constantSubstitutor?.substitute(declaredUpperBound, Variance.INVARIANT)
val position = TYPE_BOUND_POSITION.position(typeVariable.getIndex())
- if (substitutedBound != null && !isErrorOrSpecialType(substitutedBound, position)) {
- typeBounds.addBound(UPPER_BOUND, substitutedBound, position)
- }
+ val variableType = JetTypeImpl(Annotations.EMPTY, typeVariable.getTypeConstructor(), false, listOf(), JetScope.Empty)
+ addBound(variableType, Bound(declaredUpperBound, UPPER_BOUND, position, declaredUpperBound.isProper()))
}
}
}
- public fun copy(): ConstraintSystem = createNewConstraintSystemFromThis({ it }, { it.copy() }, { true })
+ fun JetType.isProper() = !TypeUtils.containsSpecialType(this) {
+ type -> type.getConstructor().getDeclarationDescriptor() in getAllTypeVariables()
+ }
+
+ fun JetType.getNestedTypeVariables(): List<TypeParameterDescriptor> {
+ return getNestedTypeArguments().map { typeProjection ->
+ typeProjection.getType().getConstructor().getDeclarationDescriptor() as? TypeParameterDescriptor
+
+ }.filterNotNull().filter { it in getAllTypeVariables() }
+ }
- public fun substituteTypeVariables(typeVariablesMap: (TypeParameterDescriptor) -> TypeParameterDescriptor): ConstraintSystem {
+ public fun copy(): ConstraintSystem = createNewConstraintSystemFromThis({ it }, { true })
+
+ public fun substituteTypeVariables(typeVariablesMap: (TypeParameterDescriptor) -> TypeParameterDescriptor?): ConstraintSystem {
// type bounds are proper types and don't contain other variables
- return createNewConstraintSystemFromThis(typeVariablesMap, { it }, { true })
+ return createNewConstraintSystemFromThis(typeVariablesMap, { true })
}
- public fun filterConstraintsOut(vararg excludePositions: ConstraintPosition): ConstraintSystem {
- val positions = excludePositions.toSet()
- return filterConstraints { !positions.contains(it) }
+ public fun filterConstraintsOut(excludePosition: ConstraintPosition): ConstraintSystem {
+ return filterConstraints { !it.equalsOrContains(excludePosition) }
}
public fun filterConstraints(condition: (ConstraintPosition) -> Boolean): ConstraintSystem {
- return createNewConstraintSystemFromThis({ it }, { it.filter(condition) }, condition)
+ return createNewConstraintSystemFromThis({ it }, condition)
}
public fun getSystemWithoutWeakConstraints(): ConstraintSystem {
- return filterConstraints {
- constraintPosition ->
+ return filterConstraints(fun (constraintPosition): Boolean {
+ if (constraintPosition !is CompoundConstraintPosition) return constraintPosition.isStrong()
+
// 'isStrong' for compound means 'has some strong constraints'
// but for testing absence of weak constraints we need 'has only strong constraints' here
- if (constraintPosition is CompoundConstraintPosition) {
- constraintPosition.positions.all { it.isStrong() }
- }
- else {
- constraintPosition.isStrong()
- }
- }
+ return constraintPosition.positions.all { it.isStrong() }
+ })
}
private fun createNewConstraintSystemFromThis(
- substituteTypeVariable: (TypeParameterDescriptor) -> TypeParameterDescriptor,
- replaceTypeBounds: (TypeBoundsImpl) -> TypeBoundsImpl,
+ substituteTypeVariable: (TypeParameterDescriptor) -> TypeParameterDescriptor?,
filterConstraintPosition: (ConstraintPosition) -> Boolean
): ConstraintSystem {
val newSystem = ConstraintSystemImpl()
- for ((typeParameter, typeBounds) in typeParameterBounds) {
- val newTypeParameter = substituteTypeVariable(typeParameter)
- newSystem.typeParameterBounds.put(newTypeParameter, replaceTypeBounds(typeBounds))
+ for ((typeParameter, typeBounds) in allTypeParameterBounds) {
+ val newTypeParameter = substituteTypeVariable(typeParameter) ?: typeParameter
+ newSystem.allTypeParameterBounds.put(newTypeParameter, typeBounds.copy(substituteTypeVariable).filter(filterConstraintPosition))
}
+ for ((typeVariable, bounds) in usedInBounds) {
+ if (bounds.isNotEmpty()) {
+ val newTypeVariable = substituteTypeVariable(typeVariable) ?: typeVariable
+ newSystem.usedInBounds.put(newTypeVariable, ArrayList(bounds.substitute(substituteTypeVariable)))
+ }
+ }
+ newSystem.externalTypeParameters.addAll(externalTypeParameters.map { substituteTypeVariable(it) ?: it })
newSystem.errors.addAll(errors.filter { filterConstraintPosition(it.constraintPosition) }.map { it.substituteTypeVariable(substituteTypeVariable) })
return newSystem
}
@@ -189,7 +211,7 @@ public class ConstraintSystemImpl : ConstraintSystem {
addConstraint(SUB_TYPE, constrainingType, subjectType, constraintPosition)
}
- private fun addConstraint(constraintKind: ConstraintKind, subType: JetType?, superType: JetType?, constraintPosition: ConstraintPosition) {
+ fun addConstraint(constraintKind: ConstraintKind, subType: JetType?, superType: JetType?, constraintPosition: ConstraintPosition) {
val typeCheckingProcedure = TypeCheckingProcedure(object : TypeCheckingProcedureCallbacks {
private var depth = 0
@@ -213,7 +235,9 @@ public class ConstraintSystemImpl : ConstraintSystem {
}
override fun capture(typeVariable: JetType, typeProjection: TypeProjection): Boolean {
+ if (isMyTypeVariable(typeProjection.getType())) return false
val myTypeVariable = getMyTypeVariable(typeVariable)
+
if (myTypeVariable != null && constraintPosition.isCaptureAllowed()) {
if (depth > 0) {
errors.add(CannotCapture(constraintPosition, myTypeVariable))
@@ -225,7 +249,7 @@ public class ConstraintSystemImpl : ConstraintSystem {
}
override fun noCorrespondingSupertype(subtype: JetType, supertype: JetType): Boolean {
- errors.add(TypeConstructorMismatch(constraintPosition))
+ errors.add(newTypeInferenceOrConstructorMismatchError(constraintPosition))
return true
}
})
@@ -266,42 +290,54 @@ public class ConstraintSystemImpl : ConstraintSystem {
// we don't add it without knowing whether it's a function type or an extension function type
return
}
- createCorrespondingFunctionTypeForFunctionPlaceholder(subType, superType)
+ createTypeForFunctionPlaceholder(subType, superType)
}
else {
subType
}
fun simplifyConstraint(subType: JetType, superType: JetType) {
- // can be equal for the recursive invocations: fun <T> foo(i: Int) : T { ... return foo(i); } => T <: T
- // the right processing of constraints connecting type variables is not supported yet
- if (isMyTypeVariable(subType) && isMyTypeVariable(superType)) return
-
if (isMyTypeVariable(subType)) {
- val boundKind = if (constraintKind == SUB_TYPE) UPPER_BOUND else EXACT_BOUND
- generateTypeParameterConstraint(subType, superType, boundKind, constraintPosition)
+ generateTypeParameterBound(subType, superType, constraintKind.toBound(), constraintPosition)
return
}
if (isMyTypeVariable(superType)) {
- val boundKind = if (constraintKind == SUB_TYPE) LOWER_BOUND else EXACT_BOUND
- generateTypeParameterConstraint(superType, subType, boundKind, constraintPosition)
+ generateTypeParameterBound(superType, subType, constraintKind.toBound().reverse(), constraintPosition)
return
}
// if superType is nullable and subType is not nullable, unsafe call or type mismatch error will be generated later,
// but constraint system should be solved anyway
val subTypeNotNullable = TypeUtils.makeNotNullable(subType)
val superTypeNotNullable = TypeUtils.makeNotNullable(superType)
- if (constraintKind == EQUAL) {
+ val result = if (constraintKind == EQUAL) {
typeCheckingProcedure.equalTypes(subTypeNotNullable, superTypeNotNullable)
}
else {
- typeCheckingProcedure.isSubtypeOf(subTypeNotNullable, superTypeNotNullable)
+ typeCheckingProcedure.isSubtypeOf(subTypeNotNullable, superType)
}
+ if (!result) errors.add(newTypeInferenceOrConstructorMismatchError(constraintPosition))
}
simplifyConstraint(newSubType, superType)
+
}
- private fun generateTypeParameterConstraint(
+ fun addBound(variable: JetType, bound: Bound) {
+ val typeBounds = getTypeBounds(variable)
+ if (typeBounds.bounds.contains(bound)) return
+
+ typeBounds.addBound(bound)
+
+ if (!bound.isProper) {
+ for (dependentTypeVariable in bound.constrainingType.getNestedTypeVariables()) {
+ val dependentBounds = usedInBounds.getOrPut(dependentTypeVariable) { arrayListOf() }
+ dependentBounds.add(bound)
+ }
+ }
+
+ incorporateBound(variable, bound)
+ }
+
+ private fun generateTypeParameterBound(
parameterType: JetType,
constrainingType: JetType,
boundKind: TypeBounds.BoundKind,
@@ -324,10 +360,8 @@ public class ConstraintSystemImpl : ConstraintSystem {
}
}
- val typeBounds = getTypeBounds(parameterType)
-
if (!parameterType.isMarkedNullable() || !TypeUtils.isNullableType(newConstrainingType)) {
- typeBounds.addBound(boundKind, newConstrainingType, constraintPosition)
+ addBound(parameterType, Bound(newConstrainingType, boundKind, constraintPosition, newConstrainingType.isProper()))
return
}
// For parameter type T:
@@ -335,13 +369,14 @@ public class ConstraintSystemImpl : ConstraintSystem {
// constraint T? = Int! should transform to T >: Int and T <: Int!
// constraints T? >: Int?; T? >: Int! should transform to T >: Int
+ val notNullParameterType = TypeUtils.makeNotNullable(parameterType)
val notNullConstrainingType = TypeUtils.makeNotNullable(newConstrainingType)
if (boundKind == EXACT_BOUND || boundKind == LOWER_BOUND) {
- typeBounds.addBound(LOWER_BOUND, notNullConstrainingType, constraintPosition)
+ addBound(notNullParameterType, Bound(notNullConstrainingType, LOWER_BOUND, constraintPosition, notNullConstrainingType.isProper()))
}
// constraints T? <: Int?; T? <: Int! should transform to T <: Int?; T <: Int! correspondingly
if (boundKind == EXACT_BOUND || boundKind == UPPER_BOUND) {
- typeBounds.addBound(UPPER_BOUND, newConstrainingType, constraintPosition)
+ addBound(notNullParameterType, Bound(newConstrainingType, UPPER_BOUND, constraintPosition, newConstrainingType.isProper()))
}
}
@@ -355,7 +390,6 @@ public class ConstraintSystemImpl : ConstraintSystem {
&& constrainingTypeProjection.getProjectionKind() == Variance.IN_VARIANCE) {
errors.add(CannotCapture(constraintPosition, typeVariable))
}
- val typeBounds = getTypeBounds(typeVariable)
val typeProjection = if (parameterType.isMarkedNullable()) {
TypeProjectionImpl(constrainingTypeProjection.getProjectionKind(), TypeUtils.makeNotNullable(constrainingTypeProjection.getType()))
}
@@ -363,51 +397,32 @@ public class ConstraintSystemImpl : ConstraintSystem {
constrainingTypeProjection
}
val capturedType = createCapturedType(typeProjection)
- typeBounds.addBound(EXACT_BOUND, capturedType, constraintPosition)
- }
-
- public fun processDeclaredBoundConstraints() {
- for ((typeParameterDescriptor, typeBounds) in typeParameterBounds) {
- fun compoundPosition(bound: Bound) = CompoundConstraintPosition(
- TYPE_BOUND_POSITION.position(typeParameterDescriptor.getIndex()), bound.position)
-
- // todo order matters here
- // it's important to create a separate variable here,
- // because the following code may add new elements to typeBounds.bounds collection
- val bounds = ArrayList(typeBounds.bounds)
- for (declaredUpperBound in typeParameterDescriptor.getUpperBounds()) {
- bounds.filter { it.kind != UPPER_BOUND }.forEach {
- lowerOrExactBound ->
- addSubtypeConstraint(lowerOrExactBound.constrainingType, declaredUpperBound, compoundPosition(lowerOrExactBound))
- }
- if (!isMyTypeVariable(declaredUpperBound)) continue
- getTypeBounds(declaredUpperBound).bounds.filter { it.kind != LOWER_BOUND }.forEach {
- upperOrExactBound ->
- typeBounds.addBound(UPPER_BOUND, upperOrExactBound.constrainingType, compoundPosition(upperOrExactBound))
- }
- }
- }
+ addBound(TypeUtils.makeNotNullable(parameterType), Bound(capturedType, EXACT_BOUND, constraintPosition))
}
override fun getTypeVariables() = typeParameterBounds.keySet()
+ fun getAllTypeVariables() = allTypeParameterBounds.keySet()
+
+ fun getBoundsUsedIn(typeVariable: TypeParameterDescriptor): List<Bound> = usedInBounds[typeVariable] ?: emptyList()
+
override fun getTypeBounds(typeVariable: TypeParameterDescriptor): TypeBoundsImpl {
if (!isMyTypeVariable(typeVariable)) {
throw IllegalArgumentException("TypeParameterDescriptor is not a type variable for constraint system: $typeVariable")
}
- return typeParameterBounds[typeVariable]!!
+ return allTypeParameterBounds[typeVariable]!!
}
- private fun getTypeBounds(parameterType: JetType): TypeBoundsImpl {
+ fun getTypeBounds(parameterType: JetType): TypeBoundsImpl {
assert (isMyTypeVariable(parameterType)) { "Type is not a type variable for constraint system: $parameterType" }
return getTypeBounds(getMyTypeVariable(parameterType)!!)
}
- private fun isMyTypeVariable(typeVariable: TypeParameterDescriptor) = typeParameterBounds.contains(typeVariable)
+ fun isMyTypeVariable(typeVariable: TypeParameterDescriptor) = allTypeParameterBounds.contains(typeVariable)
- private fun isMyTypeVariable(type: JetType): Boolean = getMyTypeVariable(type) != null
+ fun isMyTypeVariable(type: JetType): Boolean = getMyTypeVariable(type) != null
- private fun getMyTypeVariable(type: JetType): TypeParameterDescriptor? {
+ fun getMyTypeVariable(type: JetType): TypeParameterDescriptor? {
val typeParameterDescriptor = type.getConstructor().getDeclarationDescriptor() as? TypeParameterDescriptor
return if (typeParameterDescriptor != null && isMyTypeVariable(typeParameterDescriptor)) typeParameterDescriptor else null
}
@@ -415,13 +430,36 @@ public class ConstraintSystemImpl : ConstraintSystem {
override fun getResultingSubstitutor() = replaceUninferredBySpecialErrorType().setApproximateCapturedTypes()
override fun getCurrentSubstitutor() = replaceUninferredBy(TypeUtils.DONT_CARE).setApproximateCapturedTypes()
+
+ fun fixVariable(typeVariable: TypeParameterDescriptor) {
+ val typeBounds = getTypeBounds(typeVariable)
+ if (typeBounds.isFixed) return
+ typeBounds.setFixed()
+
+ val nestedTypeVariables = typeBounds.bounds.flatMap { it.constrainingType.getNestedTypeVariables() }
+ nestedTypeVariables.forEach { fixVariable(it) }
+
+ val value = typeBounds.value ?: return
+
+ val type = JetTypeImpl(Annotations.EMPTY, typeVariable.getTypeConstructor(), false, emptyList(), JetScope.Empty)
+ addBound(type, TypeBounds.Bound(value, TypeBounds.BoundKind.EXACT_BOUND, ConstraintPositionKind.FROM_COMPLETER.position()))
+ }
+
+ fun fixVariables() {
+ // todo variables should be fixed in the right order
+ val (external, functionTypeParameters) = getAllTypeVariables().partition { externalTypeParameters.contains(it) }
+ external.forEach { fixVariable(it) }
+ functionTypeParameters.forEach { fixVariable(it) }
+ }
+
}
-fun createCorrespondingFunctionTypeForFunctionPlaceholder(
+fun createTypeForFunctionPlaceholder(
functionPlaceholder: JetType,
expectedType: JetType
): JetType {
- assert(ErrorUtils.isFunctionPlaceholder(functionPlaceholder)) { "Function placeholder type expected: $functionPlaceholder" }
+ if (!ErrorUtils.isFunctionPlaceholder(functionPlaceholder)) return functionPlaceholder
+
val functionPlaceholderTypeConstructor = functionPlaceholder.getConstructor() as FunctionPlaceholderTypeConstructor
val isExtension = KotlinBuiltIns.isExtensionFunctionType(expectedType)
@@ -450,3 +488,7 @@ private class SubstitutionWithCapturedTypeApproximation(val substitution: TypeSu
override fun isEmpty() = substitution.isEmpty()
override fun approximateCapturedTypes() = true
}
+
+public fun ConstraintSystemImpl.registerTypeVariables(typeVariables: Map<TypeParameterDescriptor, Variance>) {
+ registerTypeVariables(typeVariables.keySet(), { typeVariables[it]!! })
+}
\ No newline at end of file
diff --git a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystemStatus.kt b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystemStatus.kt
index 367ec7c147947..1b403fb513a4f 100644
--- a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystemStatus.kt
+++ b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/ConstraintSystemStatus.kt
@@ -84,4 +84,9 @@ public trait ConstraintSystemStatus {
* in invocation <tt>foo(array)</tt> where array has type <tt>Array<Array<out Int>></tt>.
*/
public fun hasCannotCaptureTypesError(): Boolean
+
+ /**
+ * Returns <tt>true</tt> if there's an error in constraint system incorporation.
+ */
+ public fun hasTypeInferenceIncorporationError(): Boolean
}
diff --git a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/TypeBounds.kt b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/TypeBounds.kt
index fc759a211a82b..4ac4d79d743cc 100644
--- a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/TypeBounds.kt
+++ b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/TypeBounds.kt
@@ -16,10 +16,15 @@
package org.jetbrains.kotlin.resolve.calls.inference
-import org.jetbrains.kotlin.types.Variance
import org.jetbrains.kotlin.descriptors.TypeParameterDescriptor
-import org.jetbrains.kotlin.types.JetType
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.EXACT_BOUND
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.LOWER_BOUND
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.UPPER_BOUND
import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPosition
+import org.jetbrains.kotlin.types.JetType
+import org.jetbrains.kotlin.types.Variance
+import kotlin.properties.Delegates
public trait TypeBounds {
public val varianceOfPosition: Variance
@@ -29,6 +34,7 @@ public trait TypeBounds {
public val bounds: Collection<Bound>
public val value: JetType?
+ get() = if (values.size() == 1) values.first() else null
public val values: Collection<JetType>
@@ -38,5 +44,41 @@ public trait TypeBounds {
EXACT_BOUND
}
- public class Bound(public val constrainingType: JetType, public val kind: BoundKind, public val position: ConstraintPosition)
+ public class Bound(
+ public val constrainingType: JetType,
+ public val kind: BoundKind,
+ public val position: ConstraintPosition,
+ public val isProper: Boolean = true
+ ) {
+ public var typeVariable: TypeParameterDescriptor by Delegates.notNull()
+
+ override fun equals(other: Any?): Boolean {
+ if (this === other) return true
+ if (other == null || javaClass != other.javaClass) return false
+
+ val bound = other as Bound
+
+ if (constrainingType != bound.constrainingType) return false
+ if (kind != bound.kind) return false
+
+ if (position.isStrong() != bound.position.isStrong()) return false
+
+ return true
+ }
+
+ override fun hashCode(): Int {
+ var result = constrainingType.hashCode()
+ result = 31 * result + kind.hashCode()
+ result = 31 * result + if (position.isStrong()) 1 else 0
+ return result
+ }
+
+ override fun toString() = "Bound($constrainingType, $kind, $position, isProper = $isProper)"
+ }
+}
+
+fun BoundKind.reverse() = when (this) {
+ LOWER_BOUND -> UPPER_BOUND
+ UPPER_BOUND -> LOWER_BOUND
+ EXACT_BOUND -> EXACT_BOUND
}
diff --git a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/TypeBoundsImpl.kt b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/TypeBoundsImpl.kt
index 2b921f82b4a63..1a05aaeef8de0 100644
--- a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/TypeBoundsImpl.kt
+++ b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/TypeBoundsImpl.kt
@@ -17,32 +17,40 @@
package org.jetbrains.kotlin.resolve.calls.inference
import org.jetbrains.kotlin.descriptors.TypeParameterDescriptor
-import org.jetbrains.kotlin.types.Variance
+import org.jetbrains.kotlin.descriptors.annotations.Annotations
import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.Bound
-import org.jetbrains.kotlin.types.JetType
import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind
-import org.jetbrains.kotlin.types.ErrorUtils
-import org.jetbrains.kotlin.types.CommonSupertypes
-import org.jetbrains.kotlin.types.TypeUtils
-import org.jetbrains.kotlin.types.checker.JetTypeChecker
-import org.jetbrains.kotlin.resolve.constants.IntegerValueTypeConstructor
-import java.util.LinkedHashSet
-import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.*
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.EXACT_BOUND
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.LOWER_BOUND
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.UPPER_BOUND
import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPosition
+import org.jetbrains.kotlin.resolve.constants.IntegerValueTypeConstructor
+import org.jetbrains.kotlin.resolve.scopes.JetScope
+import org.jetbrains.kotlin.types.*
+import org.jetbrains.kotlin.types.checker.JetTypeChecker
import org.jetbrains.kotlin.utils.addIfNotNull
-import org.jetbrains.kotlin.types.singleBestRepresentative
+import java.util.ArrayList
+import java.util.LinkedHashSet
public class TypeBoundsImpl(
override val typeVariable: TypeParameterDescriptor,
override val varianceOfPosition: Variance
) : TypeBounds {
- override val bounds = LinkedHashSet<Bound>()
+ override val bounds = ArrayList<Bound>()
private var resultValues: Collection<JetType>? = null
- public fun addBound(kind: BoundKind, constrainingType: JetType, position: ConstraintPosition) {
+ var isFixed: Boolean = false
+ private set
+
+ public fun setFixed() {
+ isFixed = true
+ }
+
+ public fun addBound(bound: Bound) {
resultValues = null
- bounds.add(Bound(constrainingType, kind, position))
+ bound.typeVariable = typeVariable
+ bounds.add(bound)
}
private fun filterBounds(bounds: Collection<Bound>, kind: BoundKind): Set<JetType> {
@@ -64,9 +72,14 @@ public class TypeBoundsImpl(
return result
}
- fun copy(): TypeBoundsImpl {
- val typeBounds = TypeBoundsImpl(typeVariable, varianceOfPosition)
- typeBounds.bounds.addAll(bounds)
+ fun copy(substituteTypeVariable: ((TypeParameterDescriptor) -> TypeParameterDescriptor?)? = null): TypeBoundsImpl {
+ val typeBounds = TypeBoundsImpl(substituteTypeVariable?.invoke(typeVariable) ?: typeVariable, varianceOfPosition)
+ if (substituteTypeVariable == null) {
+ typeBounds.bounds.addAll(bounds)
+ }
+ else {
+ typeBounds.bounds.addAll(bounds.substitute(substituteTypeVariable))
+ }
typeBounds.resultValues = resultValues
return typeBounds
}
@@ -77,9 +90,6 @@ public class TypeBoundsImpl(
return result
}
- override val value: JetType?
- get() = if (values.size() == 1) values.first() else null
-
override val values: Collection<JetType>
get() {
if (resultValues == null) {
@@ -90,6 +100,8 @@ public class TypeBoundsImpl(
private fun computeValues(): Collection<JetType> {
val values = LinkedHashSet<JetType>()
+ val bounds = bounds.filter { it.isProper }
+
if (bounds.isEmpty()) {
return listOf()
}
@@ -101,7 +113,7 @@ public class TypeBoundsImpl(
val exactBounds = filterBounds(bounds, EXACT_BOUND, values)
val bestFit = exactBounds.singleBestRepresentative()
if (bestFit != null) {
- if (tryPossibleAnswer(bestFit)) {
+ if (tryPossibleAnswer(bounds, bestFit)) {
return listOf(bestFit)
}
}
@@ -111,7 +123,7 @@ public class TypeBoundsImpl(
filterBounds(bounds, LOWER_BOUND, values).partition { it.getConstructor() is IntegerValueTypeConstructor }
val superTypeOfLowerBounds = CommonSupertypes.commonSupertypeForNonDenotableTypes(generalLowerBounds)
- if (tryPossibleAnswer(superTypeOfLowerBounds)) {
+ if (tryPossibleAnswer(bounds, superTypeOfLowerBounds)) {
return setOf(superTypeOfLowerBounds!!)
}
values.addIfNotNull(superTypeOfLowerBounds)
@@ -121,14 +133,14 @@ public class TypeBoundsImpl(
//foo(1, c: Consumer<Any>) - infer Int, not Any here
val superTypeOfNumberLowerBounds = TypeUtils.commonSupertypeForNumberTypes(numberLowerBounds)
- if (tryPossibleAnswer(superTypeOfNumberLowerBounds)) {
+ if (tryPossibleAnswer(bounds, superTypeOfNumberLowerBounds)) {
return setOf(superTypeOfNumberLowerBounds!!)
}
values.addIfNotNull(superTypeOfNumberLowerBounds)
if (superTypeOfLowerBounds != null && superTypeOfNumberLowerBounds != null) {
val superTypeOfAllLowerBounds = CommonSupertypes.commonSupertypeForNonDenotableTypes(listOf(superTypeOfLowerBounds, superTypeOfNumberLowerBounds))
- if (tryPossibleAnswer(superTypeOfAllLowerBounds)) {
+ if (tryPossibleAnswer(bounds, superTypeOfAllLowerBounds)) {
return setOf(superTypeOfAllLowerBounds!!)
}
}
@@ -136,7 +148,7 @@ public class TypeBoundsImpl(
val upperBounds = filterBounds(bounds, TypeBounds.BoundKind.UPPER_BOUND, values)
val intersectionOfUpperBounds = TypeUtils.intersect(JetTypeChecker.DEFAULT, upperBounds)
if (!upperBounds.isEmpty() && intersectionOfUpperBounds != null) {
- if (tryPossibleAnswer(intersectionOfUpperBounds)) {
+ if (tryPossibleAnswer(bounds, intersectionOfUpperBounds)) {
return setOf(intersectionOfUpperBounds)
}
}
@@ -146,7 +158,7 @@ public class TypeBoundsImpl(
return values
}
- private fun tryPossibleAnswer(possibleAnswer: JetType?): Boolean {
+ private fun tryPossibleAnswer(bounds: Collection<Bound>, possibleAnswer: JetType?): Boolean {
if (possibleAnswer == null) return false
// a captured type might be an answer
if (!possibleAnswer.getConstructor().isDenotable() && !possibleAnswer.isCaptured()) return false
@@ -169,3 +181,30 @@ public class TypeBoundsImpl(
return true
}
}
+
+fun Collection<Bound>.substitute(substituteTypeVariable: (TypeParameterDescriptor) -> TypeParameterDescriptor?): List<Bound> {
+ val typeSubstitutor = TypeSubstitutor.create(object : TypeSubstitution() {
+ override fun get(key: TypeConstructor): TypeProjection? {
+ val descriptor = key.getDeclarationDescriptor()
+ if (descriptor !is TypeParameterDescriptor) return null
+ val typeParameterDescriptor = substituteTypeVariable(descriptor) ?: return null
+
+ val type = JetTypeImpl(Annotations.EMPTY, typeParameterDescriptor.getTypeConstructor(), false, listOf(), JetScope.Empty)
+ return TypeProjectionImpl(type)
+ }
+ })
+
+ return map {
+ //todo captured types
+ val substitutedType = if (it.constrainingType.getConstructor().isDenotable()) {
+ typeSubstitutor.substitute(it.constrainingType, Variance.INVARIANT)
+ } else {
+ it.constrainingType
+ }
+ substitutedType?.let { type ->
+ val newBound = Bound(type, it.kind, it.position, it.isProper)
+ newBound.typeVariable = substituteTypeVariable(it.typeVariable) ?: it.typeVariable
+ newBound
+ }
+ }.filterNotNull()
+}
\ No newline at end of file
diff --git a/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/constraintIncorporation.kt b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/constraintIncorporation.kt
new file mode 100644
index 0000000000000..821a129c10bc3
--- /dev/null
+++ b/core/descriptors/src/org/jetbrains/kotlin/resolve/calls/inference/constraintIncorporation.kt
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2010-2015 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.jetbrains.kotlin.resolve.calls.inference
+
+import org.jetbrains.kotlin.descriptors.TypeParameterDescriptor
+import org.jetbrains.kotlin.descriptors.annotations.Annotations
+import org.jetbrains.kotlin.resolve.calls.inference.ConstraintSystemImpl.ConstraintKind.EQUAL
+import org.jetbrains.kotlin.resolve.calls.inference.ConstraintSystemImpl.ConstraintKind.SUB_TYPE
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.Bound
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.EXACT_BOUND
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.LOWER_BOUND
+import org.jetbrains.kotlin.resolve.calls.inference.TypeBounds.BoundKind.UPPER_BOUND
+import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.CompoundConstraintPosition
+import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPositionKind
+import org.jetbrains.kotlin.resolve.scopes.JetScope
+import org.jetbrains.kotlin.types.*
+import org.jetbrains.kotlin.types.Variance.INVARIANT
+import org.jetbrains.kotlin.types.Variance.IN_VARIANCE
+import org.jetbrains.kotlin.types.typeUtil.getNestedTypeArguments
+import java.util.ArrayList
+
+fun ConstraintSystemImpl.incorporateBound(variable: JetType, newBound: Bound) {
+ val typeVariable = getMyTypeVariable(variable)!!
+ val typeBounds = getTypeBounds(typeVariable)
+
+ for (oldBoundIndex in typeBounds.bounds.indices) {
+ addConstraintFromBounds(typeBounds.bounds[oldBoundIndex], newBound)
+ }
+ val boundsUsedIn = getBoundsUsedIn(typeVariable)
+ for (index in boundsUsedIn.indices) {
+ val boundUsedIn = boundsUsedIn[index]
+ val type = JetTypeImpl(Annotations.EMPTY, boundUsedIn.typeVariable.getTypeConstructor(), false, listOf(), JetScope.Empty)
+ generateNewBound(type, boundUsedIn, newBound)
+ }
+
+ val constrainingType = newBound.constrainingType
+ if (isMyTypeVariable(constrainingType)) {
+ val bound = Bound(variable, newBound.kind.reverse(), newBound.position, isProper = false)
+ addBound(constrainingType, bound)
+ return
+ }
+ constrainingType.getNestedTypeVariables().forEach {
+ val boundsForNestedVariable = getTypeBounds(it).bounds
+ for (index in boundsForNestedVariable.indices) {
+ generateNewBound(variable, newBound, boundsForNestedVariable[index])
+ }
+ }
+}
+
+private fun ConstraintSystemImpl.addConstraintFromBounds(old: Bound, new: Bound) {
+ if (old == new) return
+
+ val oldType = old.constrainingType
+ val newType = new.constrainingType
+ val position = CompoundConstraintPosition(old.position, new.position)
+
+ when (old.kind to new.kind) {
+ LOWER_BOUND to UPPER_BOUND, LOWER_BOUND to EXACT_BOUND, EXACT_BOUND to UPPER_BOUND ->
+ addConstraint(SUB_TYPE, oldType, newType, position)
+
+ UPPER_BOUND to LOWER_BOUND, UPPER_BOUND to EXACT_BOUND, EXACT_BOUND to LOWER_BOUND ->
+ addConstraint(SUB_TYPE, newType, oldType, position)
+
+ EXACT_BOUND to EXACT_BOUND ->
+ addConstraint(EQUAL, oldType, newType, position)
+ }
+}
+
+private fun ConstraintSystemImpl.generateNewBound(
+ variable: JetType,
+ bound: Bound,
+ substitution: Bound
+) {
+ // Let's have a variable T, a bound 'T <=> My<R>', and a substitution 'R <=> Type'.
+ // Here <=> means lower_bound, upper_bound or exact_bound constraint.
+ // Then a new bound 'T <=> My<Type>' can be generated.
+
+ // A variance of R in 'My<R>' (with respect to both use-site and declaration-site variance).
+ val substitutionVariance: Variance = bound.constrainingType.getNestedTypeArguments().firstOrNull {
+ getMyTypeVariable(it.getType()) === substitution.typeVariable
+ }?.getProjectionKind() ?: return
+
+ // We don't substitute anything into recursive constraints
+ if (substitution.typeVariable == bound.typeVariable) return
+
+ //todo variance checker
+ val newKind = computeKindOfNewBound(bound.kind, substitutionVariance, substitution.kind) ?: return
+
+ val newTypeProjection = TypeProjectionImpl(substitutionVariance, substitution.constrainingType)
+ val substitutor = TypeSubstitutor.create(mapOf(substitution.typeVariable.getTypeConstructor() to newTypeProjection))
+ val newConstrainingType = substitutor.substitute(bound.constrainingType, INVARIANT)!!
+
+ // We don't generate new recursive constraints
+ val nestedTypeVariables = newConstrainingType.getNestedTypeVariables()
+ if (nestedTypeVariables.contains(bound.typeVariable) || nestedTypeVariables.contains(substitution.typeVariable)) return
+
+ val position = CompoundConstraintPosition(bound.position, substitution.position)
+ addBound(variable, Bound(newConstrainingType, newKind, position, newConstrainingType.isProper()))
+}
+
+private fun computeKindOfNewBound(constrainingKind: BoundKind, substitutionVariance: Variance, substitutionKind: BoundKind): BoundKind? {
+ // In examples below: List<out T>, MutableList<T>, Comparator<in T>, the variance of My<T> may be any.
+
+ // T <=> My<R>, R <=> Type -> T <=> My<Type>
+
+ // T < My<R>, R = Int -> T < My<Int>
+ if (substitutionKind == EXACT_BOUND) return constrainingKind
+
+ // T < MutableList<R>, R < Number - nothing can be inferred (R might become 'Int' later)
+ // todo T < MutableList<R>, R < Int => T < MutableList<out Int>
+ if (substitutionVariance == INVARIANT) return null
+
+ val kind = if (substitutionVariance == IN_VARIANCE) substitutionKind.reverse() else substitutionKind
+
+ // T = List<R>, R < Int -> T < List<Int>; T = Consumer<R>, R < Int -> T > Consumer<Int>
+ if (constrainingKind == EXACT_BOUND) return kind
+
+ // T < List<R>, R < Int -> T < List<Int>; T < Consumer<R>, R > Int -> T < Consumer<Int>
+ if (constrainingKind == kind) return kind
+
+ // otherwise we can generate no new constraints
+ return null
+}
\ No newline at end of file
diff --git a/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.java b/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.java
index 32cd73485298b..9c03dd4c489c4 100644
--- a/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.java
+++ b/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.java
@@ -37,6 +37,7 @@
import java.util.*;
+import static org.jetbrains.kotlin.resolve.calls.inference.InferencePackage.registerTypeVariables;
import static org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPositionKind.SPECIAL;
public class TypeUtils {
@@ -301,7 +302,7 @@ public Unit invoke(TypeParameterUsage parameterUsage) {
processAllTypeParameters(withParameters, Variance.INVARIANT, processor);
processAllTypeParameters(expected, Variance.INVARIANT, processor);
ConstraintSystemImpl constraintSystem = new ConstraintSystemImpl();
- constraintSystem.registerTypeVariables(parameters);
+ registerTypeVariables(constraintSystem, parameters);
constraintSystem.addSubtypeConstraint(withParameters, expected, SPECIAL.position());
return constraintSystem.getStatus().isSuccessful();
diff --git a/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.kt b/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.kt
index 071e9567383a4..43f0a4984a81a 100644
--- a/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.kt
+++ b/core/descriptors/src/org/jetbrains/kotlin/types/TypeUtils.kt
@@ -16,23 +16,17 @@
package org.jetbrains.kotlin.types.typeUtil
-import java.util.LinkedHashSet
+import org.jetbrains.kotlin.builtins.KotlinBuiltIns
import org.jetbrains.kotlin.descriptors.CallableDescriptor
import org.jetbrains.kotlin.descriptors.ClassDescriptor
import org.jetbrains.kotlin.descriptors.DeclarationDescriptor
import org.jetbrains.kotlin.descriptors.TypeParameterDescriptor
-import org.jetbrains.kotlin.types.JetType
-import org.jetbrains.kotlin.types.Flexibility
-import org.jetbrains.kotlin.types.TypeConstructor
-import org.jetbrains.kotlin.utils.toReadOnlyList
-import org.jetbrains.kotlin.types.checker.JetTypeChecker
-import org.jetbrains.kotlin.builtins.KotlinBuiltIns
-import org.jetbrains.kotlin.types.isDynamic
-import org.jetbrains.kotlin.types.TypeProjection
-import org.jetbrains.kotlin.types.TypeProjectionImpl
import org.jetbrains.kotlin.descriptors.annotations.Annotations
import org.jetbrains.kotlin.resolve.DescriptorUtils
-import org.jetbrains.kotlin.types.DelegatingType
+import org.jetbrains.kotlin.types.*
+import org.jetbrains.kotlin.types.checker.JetTypeChecker
+import org.jetbrains.kotlin.utils.toReadOnlyList
+import java.util.*
private fun JetType.getContainedTypeParameters(): Collection<TypeParameterDescriptor> {
val declarationDescriptor = getConstructor().getDeclarationDescriptor()
@@ -63,7 +57,8 @@ fun DeclarationDescriptor.getCapturedTypeParameters(): Collection<TypeParameterD
public fun JetType.getContainedAndCapturedTypeParameterConstructors(): Collection<TypeConstructor> {
// todo type arguments (instead of type parameters) of the type of outer class must be considered; KT-6325
- val typeParameters = getContainedTypeParameters() + getConstructor().getDeclarationDescriptor()!!.getCapturedTypeParameters()
+ val capturedTypeParameters = getConstructor().getDeclarationDescriptor()?.getCapturedTypeParameters() ?: emptyList()
+ val typeParameters = getContainedTypeParameters() + capturedTypeParameters
return typeParameters.map { it.getTypeConstructor() }.toReadOnlyList()
}
@@ -90,4 +85,32 @@ public fun JetTypeChecker.equalTypesOrNulls(type1: JetType?, type2: JetType?): B
if (type1 identityEquals type2) return true
if (type1 == null || type2 == null) return false
return equalTypes(type1, type2)
+}
+
+fun JetType.getNestedTypeArguments(): List<TypeProjection> {
+ val result = ArrayList<TypeProjection>()
+
+ val stack = ArrayDeque<TypeProjection>()
+ stack.push(TypeProjectionImpl(this))
+
+ while (!stack.isEmpty()) {
+ val typeProjection = stack.pop()
+ if (typeProjection.isStarProjection()) continue
+
+ result.add(typeProjection)
+
+ val type = typeProjection.getType()
+
+ type.getConstructor().getParameters().zip(type.getArguments()).forEach {
+ val (parameter, argument) = it
+ val newTypeProjection = if (argument.getProjectionKind() == Variance.INVARIANT && parameter.getVariance() != Variance.INVARIANT) {
+ TypeProjectionImpl(parameter.getVariance(), argument.getType())
+ }
+ else {
+ argument
+ }
+ stack.add(newTypeProjection)
+ }
+ }
+ return result
}
\ No newline at end of file
diff --git a/idea/ide-common/src/org/jetbrains/kotlin/idea/util/FuzzyType.kt b/idea/ide-common/src/org/jetbrains/kotlin/idea/util/FuzzyType.kt
index 48f8687fbed8b..d2be701309b25 100644
--- a/idea/ide-common/src/org/jetbrains/kotlin/idea/util/FuzzyType.kt
+++ b/idea/ide-common/src/org/jetbrains/kotlin/idea/util/FuzzyType.kt
@@ -16,18 +16,15 @@
package org.jetbrains.kotlin.idea.util
-import org.jetbrains.kotlin.types.JetType
-import org.jetbrains.kotlin.descriptors.TypeParameterDescriptor
import org.jetbrains.kotlin.descriptors.CallableDescriptor
-import org.jetbrains.kotlin.utils.addIfNotNull
-import java.util.HashSet
+import org.jetbrains.kotlin.descriptors.TypeParameterDescriptor
import org.jetbrains.kotlin.resolve.calls.inference.ConstraintSystemImpl
-import java.util.LinkedHashMap
-import org.jetbrains.kotlin.types.Variance
-import org.jetbrains.kotlin.resolve.calls.inference.ConstraintsUtil
+import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPositionKind
+import org.jetbrains.kotlin.types.JetType
import org.jetbrains.kotlin.types.TypeSubstitutor
+import org.jetbrains.kotlin.types.Variance
import org.jetbrains.kotlin.types.typeUtil.isSubtypeOf
-import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPositionKind
+import java.util.HashSet
fun CallableDescriptor.fuzzyReturnType(): FuzzyType? {
val returnType = getReturnType() ?: return null
@@ -103,18 +100,14 @@ class FuzzyType(
}
val constraintSystem = ConstraintSystemImpl()
- val typeVariables = LinkedHashMap<TypeParameterDescriptor, Variance>()
- for (typeParameter in freeParameters) {
- typeVariables[typeParameter] = Variance.INVARIANT
- }
- constraintSystem.registerTypeVariables(typeVariables)
+ constraintSystem.registerTypeVariables(freeParameters, { Variance.INVARIANT })
when (matchKind) {
MatchKind.IS_SUBTYPE -> constraintSystem.addSubtypeConstraint(type, otherType, ConstraintPositionKind.SPECIAL.position())
MatchKind.IS_SUPERTYPE -> constraintSystem.addSubtypeConstraint(otherType, type, ConstraintPositionKind.SPECIAL.position())
}
- constraintSystem.processDeclaredBoundConstraints()
+ constraintSystem.fixVariables()
if (!constraintSystem.getStatus().hasContradiction()) {
// currently ConstraintSystem return successful status in case there are problems with nullability
|
6d69f7beadc0f23c9cedbc56332cd9712664c99b
|
drools
|
-update unit tests for LeftInputAdapterNode--git-svn-id: https://svn.jboss.org/repos/labs/trunk/labs/jbossrules@2283 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
p
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/test/java/org/drools/reteoo/LeftInputAdapterNodeTest.java b/drools-core/src/test/java/org/drools/reteoo/LeftInputAdapterNodeTest.java
index e4b89ea06d9..1f843d7f608 100644
--- a/drools-core/src/test/java/org/drools/reteoo/LeftInputAdapterNodeTest.java
+++ b/drools-core/src/test/java/org/drools/reteoo/LeftInputAdapterNodeTest.java
@@ -1,17 +1,19 @@
package org.drools.reteoo;
import java.util.List;
+import java.util.Map;
import org.drools.DroolsTestCase;
import org.drools.rule.Rule;
import org.drools.spi.PropagationContext;
+import org.drools.util.LinkedList;
+import org.drools.util.LinkedListNodeWrapper;
public class LeftInputAdapterNodeTest extends DroolsTestCase {
public void testLeftInputAdapterNode() {
MockObjectSource source = new MockObjectSource( 15 );
LeftInputAdapterNode liaNode = new LeftInputAdapterNode( 23,
- 0,
source );
assertEquals( 23,
liaNode.getId() );
@@ -31,7 +33,6 @@ public void testAttach() throws Exception {
MockObjectSource source = new MockObjectSource( 15 );
LeftInputAdapterNode liaNode = new LeftInputAdapterNode( 1,
- 0,
source );
assertEquals( 1,
@@ -62,34 +63,49 @@ public void testAssertObject() throws Exception {
WorkingMemoryImpl workingMemory = new WorkingMemoryImpl( new RuleBaseImpl() );
- MockObjectSource source = new MockObjectSource( 15 );
-
LeftInputAdapterNode liaNode = new LeftInputAdapterNode( 1,
- 0,
- source );
+ new MockObjectSource( 15 ) );
MockTupleSink sink = new MockTupleSink();
liaNode.addTupleSink( sink );
Object string1 = "cheese";
- FactHandleImpl handle1 = new FactHandleImpl( 1 );
-
- workingMemory.putObject( handle1,
- string1 );
-
- /* assert object */
- liaNode.assertObject( handle1,
+
+ // assert object
+ FactHandleImpl f0 = (FactHandleImpl) workingMemory.assertObject( string1 );
+ liaNode.assertObject( f0,
context,
workingMemory );
List asserted = sink.getAsserted();
assertLength( 1,
asserted );
-
- /* check tuple comes out */
- ReteTuple tuple = (ReteTuple) ((Object[]) asserted.get( 0 ))[0];
+ ReteTuple tuple0 = (ReteTuple) ((Object[]) asserted.get( 0 ))[0];
assertSame( string1,
- workingMemory.getObject( tuple.get( 0 ) ) );
+ workingMemory.getObject( tuple0.get( 0 ) ) );
+
+ // check node memory
+ Map map = (Map) workingMemory.getNodeMemory( liaNode );
+ LinkedList list0 = (LinkedList) (LinkedList)map.get( f0 );
+ assertEquals( 1, list0.size() );
+ assertSame( tuple0, ((LinkedListNodeWrapper)list0.getFirst()).getNode() );
+
+ // check memory stacks correctly
+ FactHandleImpl f1 = (FactHandleImpl) workingMemory.assertObject( "test1" );
+ liaNode.assertObject( f1,
+ context,
+ workingMemory );
+
+ assertLength( 2,
+ asserted );
+ ReteTuple tuple1 = (ReteTuple) ((Object[]) asserted.get( 1 ))[0];
+
+ LinkedList list1 = (LinkedList) (LinkedList)map.get( f1 );
+ assertEquals( 1, list1.size() );
+ assertSame( tuple1, ((LinkedListNodeWrapper)list1.getFirst()).getNode() );
+
+ assertNotSame( tuple0, tuple1 );
+
}
/**
@@ -109,39 +125,26 @@ public void testRetractObject() throws Exception {
MockObjectSource source = new MockObjectSource( 15 );
LeftInputAdapterNode liaNode = new LeftInputAdapterNode( 1,
- 0,
source );
MockTupleSink sink = new MockTupleSink();
liaNode.addTupleSink( sink );
- Object string1 = "cheese";
-
- FactHandleImpl handle1 = new FactHandleImpl( 1 );
-
- workingMemory.putObject( handle1,
- string1 );
+ FactHandleImpl f0 = (FactHandleImpl) workingMemory.assertObject( "f1" );
/* assert object */
- liaNode.assertObject( handle1,
+ liaNode.assertObject( f0,
context,
workingMemory );
ReteTuple tuple = (ReteTuple) ((Object[]) sink.getAsserted().get( 0 ))[0];
+
+ liaNode.retractObject( f0, context, workingMemory );
+
+ Map map = (Map) workingMemory.getNodeMemory( liaNode );
+ assertNull( map.get( f0 ) );
- ReteTuple previous = new ReteTuple(0, handle1, workingMemory);
- ReteTuple next = new ReteTuple(0, handle1, workingMemory);
-
- tuple.setPrevious( previous );
- tuple.setNext( next );
-
- tuple.remove( context, workingMemory );
-
- assertSame(previous.getNext(), next);
- assertSame(next.getPrevious(), previous);
+ assertSame( tuple, (ReteTuple) ((Object[]) sink.getRetracted().get( 0 ))[0] );
- assertNull(tuple.getPrevious());
- assertNull(tuple.getNext());
-
}
}
|
1722fa667819c3cafc6b5de93d7c93bbd570cd47
|
spring-framework
|
JSR-223 based StandardScriptFactory (including- <lang:std> support)--This commit also completes 4.2 schema variants in spring-context.--Issue: SPR-5215-
|
a
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-context/src/main/java/org/springframework/scripting/ScriptCompilationException.java b/spring-context/src/main/java/org/springframework/scripting/ScriptCompilationException.java
index ccb867450063..2064ae90800a 100644
--- a/spring-context/src/main/java/org/springframework/scripting/ScriptCompilationException.java
+++ b/spring-context/src/main/java/org/springframework/scripting/ScriptCompilationException.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2012 the original author or authors.
+ * Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -41,8 +41,7 @@ public ScriptCompilationException(String msg) {
/**
* Constructor for ScriptCompilationException.
* @param msg the detail message
- * @param cause the root cause (usually from using an underlying
- * script compiler API)
+ * @param cause the root cause (usually from using an underlying script compiler API)
*/
public ScriptCompilationException(String msg, Throwable cause) {
super(msg, cause);
@@ -51,23 +50,32 @@ public ScriptCompilationException(String msg, Throwable cause) {
/**
* Constructor for ScriptCompilationException.
* @param scriptSource the source for the offending script
- * @param cause the root cause (usually from using an underlying
- * script compiler API)
+ * @param msg the detail message
+ * @since 4.2
+ */
+ public ScriptCompilationException(ScriptSource scriptSource, String msg) {
+ super("Could not compile " + scriptSource + ": " + msg);
+ this.scriptSource = scriptSource;
+ }
+
+ /**
+ * Constructor for ScriptCompilationException.
+ * @param scriptSource the source for the offending script
+ * @param cause the root cause (usually from using an underlying script compiler API)
*/
public ScriptCompilationException(ScriptSource scriptSource, Throwable cause) {
- super("Could not compile script", cause);
+ super("Could not compile " + scriptSource, cause);
this.scriptSource = scriptSource;
}
/**
* Constructor for ScriptCompilationException.
- * @param msg the detail message
* @param scriptSource the source for the offending script
- * @param cause the root cause (usually from using an underlying
- * script compiler API)
+ * @param msg the detail message
+ * @param cause the root cause (usually from using an underlying script compiler API)
*/
public ScriptCompilationException(ScriptSource scriptSource, String msg, Throwable cause) {
- super("Could not compile script [" + scriptSource + "]: " + msg, cause);
+ super("Could not compile " + scriptSource + ": " + msg, cause);
this.scriptSource = scriptSource;
}
diff --git a/spring-context/src/main/java/org/springframework/scripting/config/LangNamespaceHandler.java b/spring-context/src/main/java/org/springframework/scripting/config/LangNamespaceHandler.java
index b7888faa7817..8a113e9905a9 100644
--- a/spring-context/src/main/java/org/springframework/scripting/config/LangNamespaceHandler.java
+++ b/spring-context/src/main/java/org/springframework/scripting/config/LangNamespaceHandler.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2012 the original author or authors.
+ * Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -44,6 +44,7 @@ public void init() {
registerScriptBeanDefinitionParser("groovy", "org.springframework.scripting.groovy.GroovyScriptFactory");
registerScriptBeanDefinitionParser("jruby", "org.springframework.scripting.jruby.JRubyScriptFactory");
registerScriptBeanDefinitionParser("bsh", "org.springframework.scripting.bsh.BshScriptFactory");
+ registerScriptBeanDefinitionParser("std", "org.springframework.scripting.support.StandardScriptFactory");
registerBeanDefinitionParser("defaults", new ScriptingDefaultsParser());
}
diff --git a/spring-context/src/main/java/org/springframework/scripting/config/ScriptBeanDefinitionParser.java b/spring-context/src/main/java/org/springframework/scripting/config/ScriptBeanDefinitionParser.java
index de5aa7ea9ddb..67c9a3fe0b99 100644
--- a/spring-context/src/main/java/org/springframework/scripting/config/ScriptBeanDefinitionParser.java
+++ b/spring-context/src/main/java/org/springframework/scripting/config/ScriptBeanDefinitionParser.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2013 the original author or authors.
+ * Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -55,6 +55,8 @@
*/
class ScriptBeanDefinitionParser extends AbstractBeanDefinitionParser {
+ private static final String ENGINE_ATTRIBUTE = "engine";
+
private static final String SCRIPT_SOURCE_ATTRIBUTE = "script-source";
private static final String INLINE_SCRIPT_ELEMENT = "inline-script";
@@ -104,6 +106,9 @@ public ScriptBeanDefinitionParser(String scriptFactoryClassName) {
@Override
@SuppressWarnings("deprecation")
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
+ // Engine attribute only supported for <lang:std>
+ String engine = element.getAttribute(ENGINE_ATTRIBUTE);
+
// Resolve the script source.
String value = resolveScriptSource(element, parserContext.getReaderContext());
if (value == null) {
@@ -184,9 +189,13 @@ else if (beanDefinitionDefaults.getDestroyMethodName() != null) {
// Add constructor arguments.
ConstructorArgumentValues cav = bd.getConstructorArgumentValues();
int constructorArgNum = 0;
+ if (StringUtils.hasLength(engine)) {
+ cav.addIndexedArgumentValue(constructorArgNum++, engine);
+ }
cav.addIndexedArgumentValue(constructorArgNum++, value);
if (element.hasAttribute(SCRIPT_INTERFACES_ATTRIBUTE)) {
- cav.addIndexedArgumentValue(constructorArgNum++, element.getAttribute(SCRIPT_INTERFACES_ATTRIBUTE));
+ cav.addIndexedArgumentValue(
+ constructorArgNum++, element.getAttribute(SCRIPT_INTERFACES_ATTRIBUTE), "java.lang.Class[]");
}
// This is used for Groovy. It's a bean reference to a customizer bean.
diff --git a/spring-context/src/main/java/org/springframework/scripting/support/StandardScriptFactory.java b/spring-context/src/main/java/org/springframework/scripting/support/StandardScriptFactory.java
new file mode 100644
index 000000000000..8c6b8c979364
--- /dev/null
+++ b/spring-context/src/main/java/org/springframework/scripting/support/StandardScriptFactory.java
@@ -0,0 +1,248 @@
+/*
+ * Copyright 2002-2015 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.scripting.support;
+
+import java.io.IOException;
+import javax.script.Invocable;
+import javax.script.ScriptEngine;
+import javax.script.ScriptEngineManager;
+
+import org.springframework.beans.BeanUtils;
+import org.springframework.beans.factory.BeanClassLoaderAware;
+import org.springframework.scripting.ScriptCompilationException;
+import org.springframework.scripting.ScriptFactory;
+import org.springframework.scripting.ScriptSource;
+import org.springframework.util.Assert;
+import org.springframework.util.ClassUtils;
+import org.springframework.util.ObjectUtils;
+import org.springframework.util.StringUtils;
+
+/**
+ * {@link org.springframework.scripting.ScriptFactory} implementation based
+ * on the JSR-223 script engine abstraction (as included in Java 6+).
+ * Supports JavaScript, Groovy, JRuby and other JSR-223 compliant engines.
+ *
+ * <p>Typically used in combination with a
+ * {@link org.springframework.scripting.support.ScriptFactoryPostProcessor};
+ * see the latter's javadoc for a configuration example.
+ *
+ * @author Juergen Hoeller
+ * @since 4.2
+ * @see ScriptFactoryPostProcessor
+ */
+public class StandardScriptFactory implements ScriptFactory, BeanClassLoaderAware {
+
+ private final String scriptEngineName;
+
+ private final String scriptSourceLocator;
+
+ private final Class<?>[] scriptInterfaces;
+
+ private ClassLoader beanClassLoader = ClassUtils.getDefaultClassLoader();
+
+ private volatile ScriptEngine scriptEngine;
+
+
+ /**
+ * Create a new StandardScriptFactory for the given script source.
+ * @param scriptSourceLocator a locator that points to the source of the script.
+ * Interpreted by the post-processor that actually creates the script.
+ */
+ public StandardScriptFactory(String scriptSourceLocator) {
+ this(null, scriptSourceLocator, (Class<?>[]) null);
+ }
+
+ /**
+ * Create a new StandardScriptFactory for the given script source.
+ * @param scriptSourceLocator a locator that points to the source of the script.
+ * Interpreted by the post-processor that actually creates the script.
+ * @param scriptInterfaces the Java interfaces that the scripted object
+ * is supposed to implement
+ */
+ public StandardScriptFactory(String scriptSourceLocator, Class<?>... scriptInterfaces) {
+ this(null, scriptSourceLocator, scriptInterfaces);
+ }
+
+ /**
+ * Create a new StandardScriptFactory for the given script source.
+ * @param scriptEngineName the name of the JSR-223 ScriptEngine to use
+ * (explicitly given instead of inferred from the script source)
+ * @param scriptSourceLocator a locator that points to the source of the script.
+ * Interpreted by the post-processor that actually creates the script.
+ */
+ public StandardScriptFactory(String scriptEngineName, String scriptSourceLocator) {
+ this(scriptEngineName, scriptSourceLocator, (Class<?>[]) null);
+ }
+
+ /**
+ * Create a new StandardScriptFactory for the given script source.
+ * @param scriptEngineName the name of the JSR-223 ScriptEngine to use
+ * (explicitly given instead of inferred from the script source)
+ * @param scriptSourceLocator a locator that points to the source of the script.
+ * Interpreted by the post-processor that actually creates the script.
+ * @param scriptInterfaces the Java interfaces that the scripted object
+ * is supposed to implement
+ */
+ public StandardScriptFactory(String scriptEngineName, String scriptSourceLocator, Class<?>... scriptInterfaces) {
+ Assert.hasText(scriptSourceLocator, "'scriptSourceLocator' must not be empty");
+ this.scriptEngineName = scriptEngineName;
+ this.scriptSourceLocator = scriptSourceLocator;
+ this.scriptInterfaces = scriptInterfaces;
+ }
+
+
+ @Override
+ public void setBeanClassLoader(ClassLoader classLoader) {
+ this.beanClassLoader = classLoader;
+ }
+
+ protected ScriptEngine retrieveScriptEngine(ScriptSource scriptSource) {
+ ScriptEngineManager scriptEngineManager = new ScriptEngineManager(this.beanClassLoader);
+ if (this.scriptEngineName != null) {
+ ScriptEngine engine = scriptEngineManager.getEngineByName(this.scriptEngineName);
+ if (engine == null) {
+ throw new IllegalStateException("Script engine named '" + this.scriptEngineName + "' not found");
+ }
+ return engine;
+ }
+ if (scriptSource instanceof ResourceScriptSource) {
+ String filename = ((ResourceScriptSource) scriptSource).getResource().getFilename();
+ if (filename != null) {
+ String extension = StringUtils.getFilenameExtension(filename);
+ if (extension != null) {
+ ScriptEngine engine = scriptEngineManager.getEngineByExtension(extension);
+ if (engine != null) {
+ return engine;
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+
+ @Override
+ public String getScriptSourceLocator() {
+ return this.scriptSourceLocator;
+ }
+
+ @Override
+ public Class<?>[] getScriptInterfaces() {
+ return this.scriptInterfaces;
+ }
+
+ @Override
+ public boolean requiresConfigInterface() {
+ return false;
+ }
+
+
+ /**
+ * Load and parse the script via JSR-223's ScriptEngine.
+ */
+ @Override
+ public Object getScriptedObject(ScriptSource scriptSource, Class<?>... actualInterfaces)
+ throws IOException, ScriptCompilationException {
+
+ Object script;
+
+ try {
+ if (this.scriptEngine == null) {
+ this.scriptEngine = retrieveScriptEngine(scriptSource);
+ if (this.scriptEngine == null) {
+ throw new IllegalStateException("Could not determine script engine for " + scriptSource);
+ }
+ }
+ script = this.scriptEngine.eval(scriptSource.getScriptAsString());
+ }
+ catch (Exception ex) {
+ throw new ScriptCompilationException(scriptSource, ex);
+ }
+
+ if (!ObjectUtils.isEmpty(actualInterfaces)) {
+ boolean adaptationRequired = false;
+ for (Class<?> requestedIfc : actualInterfaces) {
+ if (!requestedIfc.isInstance(script)) {
+ adaptationRequired = true;
+ }
+ }
+ if (adaptationRequired) {
+ Class<?> adaptedIfc;
+ if (actualInterfaces.length == 1) {
+ adaptedIfc = actualInterfaces[0];
+ }
+ else {
+ adaptedIfc = ClassUtils.createCompositeInterface(actualInterfaces, this.beanClassLoader);
+ }
+ if (adaptedIfc != null) {
+ if (!(this.scriptEngine instanceof Invocable)) {
+ throw new ScriptCompilationException(scriptSource,
+ "ScriptEngine must implement Invocable in order to adapt it to an interface: " +
+ this.scriptEngine);
+ }
+ Invocable invocable = (Invocable) this.scriptEngine;
+ if (script != null) {
+ script = invocable.getInterface(script, adaptedIfc);
+ }
+ if (script == null) {
+ script = invocable.getInterface(adaptedIfc);
+ if (script == null) {
+ throw new ScriptCompilationException(scriptSource,
+ "Could not adapt script to interface [" + adaptedIfc.getName() + "]");
+ }
+ }
+ }
+ }
+ }
+
+ if (script instanceof Class) {
+ Class<?> scriptClass = (Class<?>) script;
+ try {
+ return scriptClass.newInstance();
+ }
+ catch (InstantiationException ex) {
+ throw new ScriptCompilationException(
+ scriptSource, "Could not instantiate script class: " + scriptClass.getName(), ex);
+ }
+ catch (IllegalAccessException ex) {
+ throw new ScriptCompilationException(
+ scriptSource, "Could not access script constructor: " + scriptClass.getName(), ex);
+ }
+ }
+
+ return script;
+ }
+
+ @Override
+ public Class<?> getScriptedObjectType(ScriptSource scriptSource)
+ throws IOException, ScriptCompilationException {
+
+ return null;
+ }
+
+ @Override
+ public boolean requiresScriptedObjectRefresh(ScriptSource scriptSource) {
+ return scriptSource.isModified();
+ }
+
+
+ @Override
+ public String toString() {
+ return "StandardScriptFactory: script source locator [" + this.scriptSourceLocator + "]";
+ }
+
+}
diff --git a/spring-context/src/main/resources/META-INF/spring.schemas b/spring-context/src/main/resources/META-INF/spring.schemas
index 26ff291eed35..3cac0ddd7997 100644
--- a/spring-context/src/main/resources/META-INF/spring.schemas
+++ b/spring-context/src/main/resources/META-INF/spring.schemas
@@ -4,7 +4,8 @@ http\://www.springframework.org/schema/context/spring-context-3.1.xsd=org/spring
http\://www.springframework.org/schema/context/spring-context-3.2.xsd=org/springframework/context/config/spring-context-3.2.xsd
http\://www.springframework.org/schema/context/spring-context-4.0.xsd=org/springframework/context/config/spring-context-4.0.xsd
http\://www.springframework.org/schema/context/spring-context-4.1.xsd=org/springframework/context/config/spring-context-4.1.xsd
-http\://www.springframework.org/schema/context/spring-context.xsd=org/springframework/context/config/spring-context-4.1.xsd
+http\://www.springframework.org/schema/context/spring-context-4.2.xsd=org/springframework/context/config/spring-context-4.2.xsd
+http\://www.springframework.org/schema/context/spring-context.xsd=org/springframework/context/config/spring-context-4.2.xsd
http\://www.springframework.org/schema/jee/spring-jee-2.0.xsd=org/springframework/ejb/config/spring-jee-2.0.xsd
http\://www.springframework.org/schema/jee/spring-jee-2.5.xsd=org/springframework/ejb/config/spring-jee-2.5.xsd
http\://www.springframework.org/schema/jee/spring-jee-3.0.xsd=org/springframework/ejb/config/spring-jee-3.0.xsd
@@ -12,7 +13,8 @@ http\://www.springframework.org/schema/jee/spring-jee-3.1.xsd=org/springframewor
http\://www.springframework.org/schema/jee/spring-jee-3.2.xsd=org/springframework/ejb/config/spring-jee-3.2.xsd
http\://www.springframework.org/schema/jee/spring-jee-4.0.xsd=org/springframework/ejb/config/spring-jee-4.0.xsd
http\://www.springframework.org/schema/jee/spring-jee-4.1.xsd=org/springframework/ejb/config/spring-jee-4.1.xsd
-http\://www.springframework.org/schema/jee/spring-jee.xsd=org/springframework/ejb/config/spring-jee-4.1.xsd
+http\://www.springframework.org/schema/jee/spring-jee-4.2.xsd=org/springframework/ejb/config/spring-jee-4.2.xsd
+http\://www.springframework.org/schema/jee/spring-jee.xsd=org/springframework/ejb/config/spring-jee-4.2.xsd
http\://www.springframework.org/schema/lang/spring-lang-2.0.xsd=org/springframework/scripting/config/spring-lang-2.0.xsd
http\://www.springframework.org/schema/lang/spring-lang-2.5.xsd=org/springframework/scripting/config/spring-lang-2.5.xsd
http\://www.springframework.org/schema/lang/spring-lang-3.0.xsd=org/springframework/scripting/config/spring-lang-3.0.xsd
@@ -20,15 +22,18 @@ http\://www.springframework.org/schema/lang/spring-lang-3.1.xsd=org/springframew
http\://www.springframework.org/schema/lang/spring-lang-3.2.xsd=org/springframework/scripting/config/spring-lang-3.2.xsd
http\://www.springframework.org/schema/lang/spring-lang-4.0.xsd=org/springframework/scripting/config/spring-lang-4.0.xsd
http\://www.springframework.org/schema/lang/spring-lang-4.1.xsd=org/springframework/scripting/config/spring-lang-4.1.xsd
-http\://www.springframework.org/schema/lang/spring-lang.xsd=org/springframework/scripting/config/spring-lang-4.1.xsd
+http\://www.springframework.org/schema/lang/spring-lang-4.2.xsd=org/springframework/scripting/config/spring-lang-4.2.xsd
+http\://www.springframework.org/schema/lang/spring-lang.xsd=org/springframework/scripting/config/spring-lang-4.2.xsd
http\://www.springframework.org/schema/task/spring-task-3.0.xsd=org/springframework/scheduling/config/spring-task-3.0.xsd
http\://www.springframework.org/schema/task/spring-task-3.1.xsd=org/springframework/scheduling/config/spring-task-3.1.xsd
http\://www.springframework.org/schema/task/spring-task-3.2.xsd=org/springframework/scheduling/config/spring-task-3.2.xsd
http\://www.springframework.org/schema/task/spring-task-4.0.xsd=org/springframework/scheduling/config/spring-task-4.0.xsd
http\://www.springframework.org/schema/task/spring-task-4.1.xsd=org/springframework/scheduling/config/spring-task-4.1.xsd
-http\://www.springframework.org/schema/task/spring-task.xsd=org/springframework/scheduling/config/spring-task-4.1.xsd
+http\://www.springframework.org/schema/task/spring-task-4.2.xsd=org/springframework/scheduling/config/spring-task-4.2.xsd
+http\://www.springframework.org/schema/task/spring-task.xsd=org/springframework/scheduling/config/spring-task-4.2.xsd
http\://www.springframework.org/schema/cache/spring-cache-3.1.xsd=org/springframework/cache/config/spring-cache-3.1.xsd
http\://www.springframework.org/schema/cache/spring-cache-3.2.xsd=org/springframework/cache/config/spring-cache-3.2.xsd
http\://www.springframework.org/schema/cache/spring-cache-4.0.xsd=org/springframework/cache/config/spring-cache-4.0.xsd
http\://www.springframework.org/schema/cache/spring-cache-4.1.xsd=org/springframework/cache/config/spring-cache-4.1.xsd
-http\://www.springframework.org/schema/cache/spring-cache.xsd=org/springframework/cache/config/spring-cache-4.1.xsd
+http\://www.springframework.org/schema/cache/spring-cache-4.2.xsd=org/springframework/cache/config/spring-cache-4.2.xsd
+http\://www.springframework.org/schema/cache/spring-cache.xsd=org/springframework/cache/config/spring-cache-4.2.xsd
diff --git a/spring-context/src/main/resources/org/springframework/cache/config/spring-cache-4.2.xsd b/spring-context/src/main/resources/org/springframework/cache/config/spring-cache-4.2.xsd
new file mode 100644
index 000000000000..66104b98dbe2
--- /dev/null
+++ b/spring-context/src/main/resources/org/springframework/cache/config/spring-cache-4.2.xsd
@@ -0,0 +1,310 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+
+<xsd:schema xmlns="http://www.springframework.org/schema/cache"
+ xmlns:xsd="http://www.w3.org/2001/XMLSchema"
+ xmlns:beans="http://www.springframework.org/schema/beans"
+ xmlns:tool="http://www.springframework.org/schema/tool"
+ targetNamespace="http://www.springframework.org/schema/cache"
+ elementFormDefault="qualified"
+ attributeFormDefault="unqualified">
+
+ <xsd:import namespace="http://www.springframework.org/schema/beans" schemaLocation="http://www.springframework.org/schema/beans/spring-beans-4.1.xsd"/>
+ <xsd:import namespace="http://www.springframework.org/schema/tool" schemaLocation="http://www.springframework.org/schema/tool/spring-tool-4.1.xsd"/>
+
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Defines the elements used in the Spring Framework's declarative
+ cache management infrastructure.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+
+ <xsd:element name="annotation-driven">
+ <xsd:complexType>
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.cache.annotation.AnnotationCacheOperationDefinitionSource"><![CDATA[
+ Indicates that cache configuration is defined by Java 5
+			annotations on bean classes, and that proxies are to be created
+			automatically for the relevant annotated beans.
+
+ The default annotations supported are Spring's @Cacheable, @CachePut and @CacheEvict. If
+ spring-context-support and the JSR-107 API are on the classpath, additional proxies are
+ automatically created for JSR-107 annotated beans, that is @CacheResult, @CachePut,
+ @CacheRemove and @CacheRemoveAll.
+
+ See org.springframework.cache.annotation.EnableCaching Javadoc
+ for information on code-based alternatives to this XML element.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:attribute name="cache-manager" type="xsd:string" default="cacheManager">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.cache.CacheManager"><![CDATA[
+ The bean name of the CacheManager that is to be used to retrieve the backing
+ caches. A default CacheResolver will be initialized behind the scenes with
+ this cache manager (or "cacheManager" if not set). For more fine-grained
+ management of the cache resolution, consider setting the 'cache-resolver'
+ attribute.
+
+ Note that this attribute is still mandatory if you are using JSR-107 as an
+ additional exception cache resolver should be created and requires a CacheManager
+ to do so.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation kind="ref">
+ <tool:expected-type type="org.springframework.cache.CacheManager"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="cache-resolver" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.cache.interceptor.CacheResolver"><![CDATA[
+ The bean name of the CacheResolver that is to be used to resolve the backing caches.
+
+ This attribute is not required, and only needs to be specified as an alternative to
+ the 'cache-manager' attribute. See the javadoc of CacheResolver for more details.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation kind="ref">
+ <tool:expected-type type="org.springframework.cache.interceptor.CacheResolver"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="key-generator" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.cache.interceptor.KeyGenerator"><![CDATA[
+ The bean name of the KeyGenerator that is to be used to retrieve the backing caches.
+
+ This attribute is not required, and only needs to be specified
+			explicitly if the default strategy (SimpleKeyGenerator) is not sufficient.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation kind="ref">
+ <tool:expected-type type="org.springframework.cache.interceptor.KeyGenerator"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="error-handler" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.cache.interceptor.CacheErrorHandler"><![CDATA[
+ The bean name of the CacheErrorHandler that is to be used to handle cache-related errors.
+
+ This attribute is not required, and only needs to be specified
+ explicitly if the default strategy (SimpleCacheErrorHandler) is not sufficient.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation kind="ref">
+ <tool:expected-type type="org.springframework.cache.interceptor.CacheErrorHandler"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="mode" default="proxy">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Should annotated beans be proxied using Spring's AOP framework,
+ or should they rather be weaved with an AspectJ transaction aspect?
+
+ AspectJ weaving requires spring-aspects.jar on the classpath,
+ as well as load-time weaving (or compile-time weaving) enabled.
+
+			Note: The weaving-based aspect requires the @Cacheable and @CacheEvict
+ annotations to be defined on the concrete class. Annotations in interfaces
+ will not work in that case (they will rather only work with interface-based proxies)!
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:simpleType>
+ <xsd:restriction base="xsd:string">
+ <xsd:enumeration value="proxy"/>
+ <xsd:enumeration value="aspectj"/>
+ </xsd:restriction>
+ </xsd:simpleType>
+ </xsd:attribute>
+ <xsd:attribute name="proxy-target-class" type="xsd:boolean" default="false">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Are class-based (CGLIB) proxies to be created? By default, standard
+ Java interface-based proxies are created.
+
+			Note: Class-based proxies require the @Cacheable and @CacheEvict annotations
+ to be defined on the concrete class. Annotations in interfaces will not work
+ in that case (they will rather only work with interface-based proxies)!
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="order" type="xsd:token">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.core.Ordered"><![CDATA[
+ Controls the ordering of the execution of the cache advisor
+ when multiple advice executes at a specific joinpoint.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="advice">
+ <xsd:complexType>
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.cache.interceptor.CacheInterceptor"><![CDATA[
+ Defines the cache semantics of the AOP advice that is to be
+ executed.
+
+ That is, this advice element is where the cacheable semantics of
+ any number of methods are defined (where cacheable semantics
+ includes the backing cache(s), the key, cache condition rules, and suchlike).
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation>
+ <tool:exports type="java:org.springframework.cache.interceptor.CacheInterceptor"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ <xsd:complexContent>
+ <xsd:extension base="beans:identifiedType">
+ <xsd:sequence>
+ <xsd:element name="caching" type="definitionsType" minOccurs="0" maxOccurs="unbounded"/>
+ </xsd:sequence>
+ <xsd:attribute name="cache-manager" type="xsd:string" default="cacheManager">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.cache.CacheManager"><![CDATA[
+ The bean name of the CacheManager that is to be used
+ for storing and retrieving data.
+
+ This attribute is not required, and only needs to be specified
+ explicitly if the bean name of the desired CacheManager
+ is not 'cacheManager'.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation kind="ref">
+ <tool:expected-type type="org.springframework.cache.CacheManager"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="key-generator" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.cache.interceptor.KeyGenerator"><![CDATA[
+ The bean name of the KeyGenerator that is to be used to retrieve the backing caches.
+
+ This attribute is not required, and only needs to be specified
+							explicitly if the default strategy (SimpleKeyGenerator) is not sufficient.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation kind="ref">
+ <tool:expected-type type="org.springframework.cache.interceptor.KeyGenerator"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:complexType name="basedefinitionType">
+ <xsd:attribute name="cache" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The name of the backing cache(s). Multiple caches can be specified by separating them using comma: 'orders, books']]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="key" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The SpEL expression used for computing the cache key, mutually exclusive with the key-generator parameter.]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="key-generator" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The name of the KeyGenerator bean responsible to compute the key, mutually exclusive with the key parameter.]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="cache-manager" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The name of the CacheManager bean responsible to manage the operation.]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="condition" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The SpEL expression used for conditioning the method caching.]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="method" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The method name(s) with which the cache attributes are to be
+ associated. The wildcard (*) character can be used to associate the
+ same cache attribute settings with a number of methods; for
+ example, 'get*', 'handle*', '*Order', 'on*Event', etc.]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+
+ </xsd:complexType>
+
+ <xsd:complexType name="definitionsType">
+ <xsd:complexContent>
+ <xsd:extension base="basedefinitionType">
+ <xsd:sequence>
+ <xsd:choice minOccurs="0" maxOccurs="unbounded">
+ <xsd:element name="cacheable" minOccurs="0" maxOccurs="unbounded">
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="basedefinitionType">
+ <xsd:attribute name="unless" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The SpEL expression used to veto the method caching.]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="cache-put" minOccurs="0" maxOccurs="unbounded">
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="basedefinitionType">
+ <xsd:attribute name="unless" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The SpEL expression used to veto the method caching.]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="cache-evict" minOccurs="0" maxOccurs="unbounded">
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="basedefinitionType">
+ <xsd:attribute name="all-entries" type="xsd:boolean" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Whether all the entries should be evicted.]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="before-invocation" type="xsd:boolean" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Whether the eviction should occur after the method is successfully
+ invoked (default) or before.]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:choice>
+ </xsd:sequence>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+
+</xsd:schema>
diff --git a/spring-context/src/main/resources/org/springframework/context/config/spring-context-4.2.xsd b/spring-context/src/main/resources/org/springframework/context/config/spring-context-4.2.xsd
new file mode 100644
index 000000000000..d9aeea34c1ae
--- /dev/null
+++ b/spring-context/src/main/resources/org/springframework/context/config/spring-context-4.2.xsd
@@ -0,0 +1,520 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<xsd:schema xmlns="http://www.springframework.org/schema/context"
+ xmlns:xsd="http://www.w3.org/2001/XMLSchema"
+ xmlns:beans="http://www.springframework.org/schema/beans"
+ xmlns:tool="http://www.springframework.org/schema/tool"
+ targetNamespace="http://www.springframework.org/schema/context"
+ elementFormDefault="qualified"
+ attributeFormDefault="unqualified">
+
+ <xsd:import namespace="http://www.springframework.org/schema/beans" schemaLocation="http://www.springframework.org/schema/beans/spring-beans-4.1.xsd"/>
+ <xsd:import namespace="http://www.springframework.org/schema/tool" schemaLocation="http://www.springframework.org/schema/tool/spring-tool-4.1.xsd"/>
+
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Defines the configuration elements for the Spring Framework's application
+ context support. Effects the activation of various configuration styles
+ for the containing Spring ApplicationContext.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+
+ <xsd:complexType name="propertyPlaceholder">
+ <xsd:attribute name="location" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The location of the properties file to resolve placeholders against, as a Spring
+ resource location: a URL, a "classpath:" pseudo URL, or a relative file path.
+ Multiple locations may be specified, separated by commas. If neither location nor
+ properties-ref is specified, placeholders will be resolved against system properties.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="properties-ref" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation source="java:java.util.Properties"><![CDATA[
+ The bean name of a Properties object that will be used for property substitution.
+ If neither location nor properties-ref is specified, placeholders will be resolved
+ against system properties.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="file-encoding" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Specifies the encoding to use for parsing properties files. Default is none,
+ using the java.util.Properties default encoding. Only applies to classic
+ properties files, not to XML files.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="order" type="xsd:token">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Specifies the order for this placeholder configurer. If more than one is present
+ in a context, the order can be important since the first one to be match a
+ placeholder will win.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="ignore-resource-not-found" type="xsd:boolean" default="false">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Specifies if failure to find the property resource location should be ignored.
+ Default is "false", meaning that if there is no file in the location specified
+ an exception will be raised at runtime.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="ignore-unresolvable" type="xsd:boolean" default="false">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Specifies if failure to find the property value to replace a key should be ignored.
+ Default is "false", meaning that this placeholder configurer will raise an exception
+ if it cannot resolve a key. Set to "true" to allow the configurer to pass on the key
+ to any others in the context that have not yet visited the key in question.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="local-override" type="xsd:boolean" default="false">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Specifies whether local properties override properties from files.
+ Default is "false": Properties from files override local defaults.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:complexType>
+
+ <xsd:element name="property-placeholder">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Activates replacement of ${...} placeholders by registering a
+ PropertySourcesPlaceholderConfigurer within the application context. Properties will
+ be resolved against the specified properties file or Properties object -- so called
+ "local properties", if any, and against the Spring Environment's current set of
+ PropertySources.
+
+ Note that as of Spring 3.1 the system-properties-mode attribute has been removed in
+ favor of the more flexible PropertySources mechanism. However, Spring 3.1-based
+ applications may continue to use the 3.0 (and older) versions of the spring-context
+ schema in order to preserve system-properties-mode behavior. In this case, the
+ traditional PropertyPlaceholderConfigurer component will be registered instead of the
+ new PropertySourcesPlaceholderConfigurer.
+
+ See ConfigurableEnvironment javadoc for more information on using.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation>
+ <tool:exports type="org.springframework.context.support.PropertySourcesPlaceholderConfigurer"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="propertyPlaceholder">
+ <xsd:attribute name="system-properties-mode" default="ENVIRONMENT">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls how to resolve placeholders against system properties. As of Spring 3.1, this
+ attribute value defaults to "ENVIRONMENT", indicating that resolution of placeholders
+ against system properties is handled via PropertySourcesPlaceholderConfigurer and its
+ delegation to the current Spring Environment object.
+
+ For maximum backward compatibility, this attribute is preserved going forward with the
+ 3.1 version of the context schema, and any values other than the default "ENVIRONMENT"
+ will cause a traditional PropertyPlaceholderConfigurer to be registered instead of the
+ newer PropertySourcesPlaceholderConfigurer variant. In this case, the Spring Environment
+ and its property sources are not interrogated when resolving placeholders. Users are
+ encouraged to consider this attribute deprecated, and to take advantage of
+ Environment/PropertySource mechanisms. See ConfigurableEnvironment javadoc for examples.
+
+ "ENVIRONMENT" indicates placeholders should be resolved against the current Environment and against any local properties;
+ "NEVER" indicates placeholders should be resolved only against local properties and never against system properties;
+ "FALLBACK" indicates placeholders should be resolved against any local properties and then against system properties;
+ "OVERRIDE" indicates placeholders should be resolved first against system properties and then against any local properties;
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:simpleType>
+ <xsd:restriction base="xsd:string">
+ <xsd:enumeration value="ENVIRONMENT"/>
+ <xsd:enumeration value="NEVER"/>
+ <xsd:enumeration value="FALLBACK"/>
+ <xsd:enumeration value="OVERRIDE"/>
+ </xsd:restriction>
+ </xsd:simpleType>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="property-override">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Activates pushing of override values into bean properties, based on configuration
+ lines of the following format: beanName.property=value
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation>
+ <tool:exports type="org.springframework.beans.factory.config.PropertyOverrideConfigurer"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="propertyPlaceholder"/>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="annotation-config">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Activates various annotations to be detected in bean classes: Spring's @Required and
+ @Autowired, as well as JSR 250's @PostConstruct, @PreDestroy and @Resource (if available),
+ JAX-WS's @WebServiceRef (if available), EJB3's @EJB (if available), and JPA's
+ @PersistenceContext and @PersistenceUnit (if available). Alternatively, you may
+ choose to activate the individual BeanPostProcessors for those annotations.
+
+ Note: This tag does not activate processing of Spring's @Transactional or EJB3's
+ @TransactionAttribute annotation. Consider the use of the <tx:annotation-driven>
+ tag for that purpose.
+
+ See javadoc for org.springframework.context.annotation.AnnotationConfigApplicationContext
+	for information on code-based alternatives to bootstrapping annotation-driven support
+	from XML.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+
+ <xsd:element name="component-scan">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Scans the classpath for annotated components that will be auto-registered as
+ Spring beans. By default, the Spring-provided @Component, @Repository,
+ @Service, and @Controller stereotypes will be detected.
+
+ Note: This tag implies the effects of the 'annotation-config' tag, activating @Required,
+ @Autowired, @PostConstruct, @PreDestroy, @Resource, @PersistenceContext and @PersistenceUnit
+ annotations in the component classes, which is usually desired for autodetected components
+ (without external configuration). Turn off the 'annotation-config' attribute to deactivate
+ this default behavior, for example in order to use custom BeanPostProcessor definitions
+ for handling those annotations.
+
+ Note: You may use placeholders in package paths, but only resolved against system
+	properties (analogous to resource paths). A component scan results in new bean definitions
+ being registered; Spring's PropertyPlaceholderConfigurer will apply to those bean
+ definitions just like to regular bean definitions, but it won't apply to the component
+ scan settings themselves.
+
+ See javadoc for org.springframework.context.annotation.ComponentScan for information
+ on code-based alternatives to bootstrapping component-scanning.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="include-filter" type="filterType"
+ minOccurs="0" maxOccurs="unbounded">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls which eligible types to include for component scanning.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+ <xsd:element name="exclude-filter" type="filterType"
+ minOccurs="0" maxOccurs="unbounded">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls which eligible types to exclude for component scanning.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+ </xsd:sequence>
+ <xsd:attribute name="base-package" type="xsd:string"
+ use="required">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The comma/semicolon/space/tab/linefeed-separated list of packages to scan for annotated components.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="resource-pattern" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls the class files eligible for component detection. Defaults to "**/*.class", the recommended value.
+ Consider use of the include-filter and exclude-filter elements for a more fine-grained approach.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="use-default-filters" type="xsd:boolean"
+ default="true">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Indicates whether automatic detection of classes annotated with @Component, @Repository, @Service,
+ or @Controller should be enabled. Default is "true".
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="annotation-config" type="xsd:boolean"
+ default="true">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Indicates whether the implicit annotation post-processors should be enabled. Default is "true".
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="name-generator" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The fully-qualified class name of the BeanNameGenerator to be used for naming detected components.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation>
+ <tool:expected-type type="java.lang.Class"/>
+ <tool:assignable-to type="org.springframework.beans.factory.support.BeanNameGenerator"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="scope-resolver" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The fully-qualified class name of the ScopeMetadataResolver to be used for resolving the scope of
+ detected components.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation>
+ <tool:expected-type type="java.lang.Class"/>
+ <tool:assignable-to type="org.springframework.context.annotation.ScopeMetadataResolver"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="scoped-proxy">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Indicates whether proxies should be generated for detected components, which may be necessary
+ when using scopes in a proxy-style fashion. Default is to generate no such proxies.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:simpleType>
+ <xsd:restriction base="xsd:string">
+ <xsd:enumeration value="no"/>
+ <xsd:enumeration value="interfaces"/>
+ <xsd:enumeration value="targetClass"/>
+ </xsd:restriction>
+ </xsd:simpleType>
+ </xsd:attribute>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="load-time-weaver">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Activates a Spring LoadTimeWeaver for this application context, available as
+ a bean with the name "loadTimeWeaver". Any bean that implements the
+ LoadTimeWeaverAware interface will then receive the LoadTimeWeaver reference
+ automatically; for example, Spring's JPA bootstrap support.
+
+ The default weaver is determined automatically: see DefaultContextLoadTimeWeaver's
+ javadoc for details.
+
+ The activation of AspectJ load-time weaving is specified via a simple flag
+ (the 'aspectj-weaving' attribute), with the AspectJ class transformer
+ registered through Spring's LoadTimeWeaver. AspectJ weaving will be activated
+ by default if a "META-INF/aop.xml" resource is present in the classpath.
+
+ This also activates the current application context for applying dependency
+ injection to non-managed classes that are instantiated outside of the Spring
+ bean factory (typically classes annotated with the @Configurable annotation).
+ This will only happen if the AnnotationBeanConfigurerAspect is on the classpath
+ (i.e. spring-aspects.jar), effectively activating "spring-configured" by default.
+
+ See javadoc for org.springframework.context.annotation.EnableLoadTimeWeaving
+ for information on code-based alternatives to bootstrapping load-time weaving support.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation>
+ <tool:exports type="org.springframework.instrument.classloading.LoadTimeWeaver"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:attribute name="weaver-class" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The fully-qualified classname of the LoadTimeWeaver that is to be activated.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation>
+ <tool:expected-type type="java.lang.Class"/>
+ <tool:assignable-to type="org.springframework.instrument.classloading.LoadTimeWeaver"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="aspectj-weaving" default="autodetect">
+ <xsd:simpleType>
+ <xsd:restriction base="xsd:string">
+ <xsd:enumeration value="on">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Switches Spring-based AspectJ load-time weaving on.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:enumeration>
+ <xsd:enumeration value="off">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Switches Spring-based AspectJ load-time weaving off.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:enumeration>
+ <xsd:enumeration value="autodetect">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Switches AspectJ load-time weaving on if a "META-INF/aop.xml" resource
+ is present in the classpath. If there is no such resource, then AspectJ
+ load-time weaving will be switched off.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:enumeration>
+ </xsd:restriction>
+ </xsd:simpleType>
+ </xsd:attribute>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="spring-configured">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.beans.factory.aspectj.AnnotationBeanConfigurerAspect"><![CDATA[
+ Signals the current application context to apply dependency injection
+ to non-managed classes that are instantiated outside of the Spring bean
+ factory (typically classes annotated with the @Configurable annotation).
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:simpleType>
+ <xsd:restriction base="xsd:string"/>
+ </xsd:simpleType>
+ </xsd:element>
+
+ <xsd:element name="mbean-export">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.jmx.export.annotation.AnnotationMBeanExporter"><![CDATA[
+ Activates default exporting of MBeans by detecting standard MBeans in the Spring
+ context as well as @ManagedResource annotations on Spring-defined beans.
+
+ The resulting MBeanExporter bean is defined under the name "mbeanExporter".
+ Alternatively, consider defining a custom AnnotationMBeanExporter bean explicitly.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation>
+ <tool:exports type="org.springframework.jmx.export.annotation.AnnotationMBeanExporter"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:attribute name="default-domain" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The default domain to use when generating JMX ObjectNames.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="server" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The bean name of the MBeanServer to which MBeans should be exported.
+ Default is to use the platform's default MBeanServer (autodetecting
+ WebLogic, WebSphere and the JVM's platform MBeanServer).
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="registration">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The registration behavior, indicating how to deal with existing MBeans
+ of the same name: fail with an exception, ignore and keep the existing
+ MBean, or replace the existing one with the new MBean.
+
+ Default is to fail with an exception.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:simpleType>
+ <xsd:restriction base="xsd:NMTOKEN">
+ <xsd:enumeration value="failOnExisting"/>
+ <xsd:enumeration value="ignoreExisting"/>
+ <xsd:enumeration value="replaceExisting"/>
+ </xsd:restriction>
+ </xsd:simpleType>
+ </xsd:attribute>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="mbean-server">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.jmx.support.MBeanServerFactoryBean"><![CDATA[
+ Exposes a default MBeanServer for the current platform.
+ Autodetects WebLogic, WebSphere and the JVM's platform MBeanServer.
+
+ The default bean name for the exposed MBeanServer is "mbeanServer".
+ This may be customized through specifying the "id" attribute.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation>
+ <tool:exports type="javax.management.MBeanServer"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="beans:identifiedType">
+ <xsd:attribute name="agent-id" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The agent id of the target MBeanServer, if any.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:complexType name="filterType">
+ <xsd:attribute name="type" use="required">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls the type of filtering to apply to the expression.
+
+ "annotation" indicates an annotation to be present at the type level in target components;
+ "assignable" indicates a class (or interface) that the target components are assignable to (extend/implement);
+ "aspectj" indicates an AspectJ type pattern expression to be matched by the target components;
+ "regex" indicates a regex pattern to be matched by the target components' class names;
+ "custom" indicates a custom implementation of the org.springframework.core.type.TypeFilter interface.
+
+ Note: This attribute will not be inherited by child bean definitions.
+ Hence, it needs to be specified per concrete bean definition.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:simpleType>
+ <xsd:restriction base="xsd:string">
+ <xsd:enumeration value="annotation"/>
+ <xsd:enumeration value="assignable"/>
+ <xsd:enumeration value="aspectj"/>
+ <xsd:enumeration value="regex"/>
+ <xsd:enumeration value="custom"/>
+ </xsd:restriction>
+ </xsd:simpleType>
+ </xsd:attribute>
+ <xsd:attribute name="expression" type="xsd:string" use="required">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Indicates the filter expression, the type of which is indicated by "type".
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:complexType>
+
+</xsd:schema>
diff --git a/spring-context/src/main/resources/org/springframework/ejb/config/spring-jee-4.2.xsd b/spring-context/src/main/resources/org/springframework/ejb/config/spring-jee-4.2.xsd
new file mode 100644
index 000000000000..3ad80562fbe0
--- /dev/null
+++ b/spring-context/src/main/resources/org/springframework/ejb/config/spring-jee-4.2.xsd
@@ -0,0 +1,267 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+
+<xsd:schema xmlns="http://www.springframework.org/schema/jee"
+ xmlns:xsd="http://www.w3.org/2001/XMLSchema"
+ xmlns:beans="http://www.springframework.org/schema/beans"
+ xmlns:tool="http://www.springframework.org/schema/tool"
+ targetNamespace="http://www.springframework.org/schema/jee"
+ elementFormDefault="qualified"
+ attributeFormDefault="unqualified">
+
+ <xsd:import namespace="http://www.springframework.org/schema/beans" schemaLocation="http://www.springframework.org/schema/beans/spring-beans-4.1.xsd"/>
+ <xsd:import namespace="http://www.springframework.org/schema/tool" schemaLocation="http://www.springframework.org/schema/tool/spring-tool-4.1.xsd"/>
+
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Defines configuration elements for access to traditional Java EE components
+ such as JNDI resources and EJB session beans.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+
+ <xsd:element name="jndi-lookup">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.jndi.JndiObjectFactoryBean"><![CDATA[
+ Exposes an object reference via a JNDI lookup.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="jndiLocatingType">
+ <xsd:attribute name="cache" type="xsd:boolean" default="true">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls whether the object returned from the JNDI lookup is cached
+ after the first lookup.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="expected-type" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation source="java:java.lang.Class"><![CDATA[
+ The type that the located JNDI object is supposed to be assignable
+ to, if indeed any.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="lookup-on-startup" type="xsd:boolean" default="true">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls whether the JNDI lookup is performed immediately on startup
+ (if true, the default), or on first access (if false).
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="proxy-interface" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation source="java:java.lang.Class"><![CDATA[
+ The proxy interface to use for the JNDI object.
+
+ Needs to be specified because the actual JNDI object type is not
+ known in advance in case of a lazy lookup.
+
+ Typically used in conjunction with "lookupOnStartup"=false and/or
+ "cache"=false.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation>
+ <tool:expected-type type="java.lang.Class"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="default-value" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Specify a default literal value to fall back to if the JNDI lookup fails.
+ This is typically used for literal values in scenarios where the JNDI environment
+ might define specific config settings but those are not required to be present.
+
+ Default is none. Note: This is only supported for lookup on startup.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="default-ref" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Specify a default bean reference to fall back to if the JNDI lookup fails.
+ This might for example point to a local fallback DataSource.
+
+ Default is none. Note: This is only supported for lookup on startup.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="local-slsb" type="ejbType">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.ejb.access.LocalStatelessSessionProxyFactoryBean"><![CDATA[
+ Exposes a reference to a local EJB Stateless SessionBean.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+
+ <xsd:element name="remote-slsb">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.ejb.access.SimpleRemoteStatelessSessionProxyFactoryBean"><![CDATA[
+ Exposes a reference to a remote EJB Stateless SessionBean.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="ejbType">
+ <xsd:attribute name="home-interface" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation source="java:java.lang.Class"><![CDATA[
+ The home interface that will be narrowed to before performing
+ the parameterless SLSB create() call that returns the actual
+ SLSB proxy.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="refresh-home-on-connect-failure" type="xsd:boolean" default="false">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls whether to refresh the EJB home on connect failure.
+
+ Can be turned on to allow for hot restart of the EJB server.
+ If a cached EJB home throws an RMI exception that indicates a
+ remote connect failure, a fresh home will be fetched and the
+ invocation will be retried.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="cache-session-bean" type="xsd:boolean" default="false">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls whether to cache the actual session bean object.
+
+ Off by default for standard EJB compliance. Turn this flag
+ on to optimize session bean access for servers that are
+ known to allow for caching the actual session bean object.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+
+ <!-- base types -->
+ <xsd:complexType name="jndiLocatingType" abstract="true">
+ <xsd:complexContent>
+ <xsd:extension base="beans:identifiedType">
+ <xsd:sequence>
+ <xsd:element name="environment" minOccurs="0" maxOccurs="1">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The newline-separated, key-value pairs for the JNDI environment
+ (in standard Properties format, namely 'key=value' pairs)
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:simpleType>
+ <xsd:restriction base="xsd:string"/>
+ </xsd:simpleType>
+ </xsd:element>
+ </xsd:sequence>
+ <xsd:attribute name="environment-ref" type="environmentRefType">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ A reference to JNDI environment properties, indicating the name of a
+	shared bean of type [java.util.Properties].
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="jndi-name" type="xsd:string" use="required">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The JNDI name to look up. This may be a fully-qualified JNDI path
+ or a local Java EE environment naming context path in which case the
+ prefix "java:comp/env/" will be prepended if applicable.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="resource-ref" type="xsd:boolean" default="true">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls whether the lookup occurs in a Java EE container, i.e. if the
+ prefix "java:comp/env/" needs to be added if the JNDI name doesn't
+ already contain it. Default is "true" (since Spring 2.5).
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="expose-access-context" type="xsd:boolean" default="false">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Set whether to expose the JNDI environment context for all access to the target
+ EJB, i.e. for all method invocations on the exposed object reference.
+ Default is "false", i.e. to only expose the JNDI context for object lookup.
+
+ Switch this flag to "true" in order to expose the JNDI environment (including
+ the authorization context) for each EJB invocation, as needed by WebLogic
+ for EJBs with authorization requirements.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="lazy-init" default="default" type="beans:defaultable-boolean">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Indicates whether or not this bean is to be lazily initialized.
+ If false, it will be instantiated on startup by bean factories
+ that perform eager initialization of singletons. The default is
+ "false".
+
+ Note: This attribute will not be inherited by child bean definitions.
+ Hence, it needs to be specified per concrete bean definition.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+
+ <xsd:complexType name="ejbType">
+ <xsd:complexContent>
+ <xsd:extension base="jndiLocatingType">
+ <xsd:attribute name="lookup-home-on-startup" type="xsd:boolean" default="true">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls whether the lookup of the EJB home object is performed
+ immediately on startup (if true, the default), or on first access
+ (if false).
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="cache-home" type="xsd:boolean" default="true">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Controls whether the EJB home object is cached once it has been located.
+ On by default; turn this flag off to always reobtain fresh home objects.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="business-interface" type="xsd:string" use="required">
+ <xsd:annotation>
+ <xsd:documentation source="java:java.lang.Class"><![CDATA[
+ The business interface of the EJB being proxied.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+
+ <xsd:simpleType name="environmentRefType">
+ <xsd:annotation>
+ <xsd:appinfo>
+ <tool:annotation kind="ref">
+ <tool:expected-type type="java.util.Properties"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ <xsd:union memberTypes="xsd:string"/>
+ </xsd:simpleType>
+
+</xsd:schema>
diff --git a/spring-context/src/main/resources/org/springframework/scheduling/config/spring-task-4.2.xsd b/spring-context/src/main/resources/org/springframework/scheduling/config/spring-task-4.2.xsd
new file mode 100644
index 000000000000..c4304fa185ea
--- /dev/null
+++ b/spring-context/src/main/resources/org/springframework/scheduling/config/spring-task-4.2.xsd
@@ -0,0 +1,307 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+
+<xsd:schema xmlns="http://www.springframework.org/schema/task"
+ xmlns:xsd="http://www.w3.org/2001/XMLSchema"
+ xmlns:tool="http://www.springframework.org/schema/tool"
+ targetNamespace="http://www.springframework.org/schema/task"
+ elementFormDefault="qualified"
+ attributeFormDefault="unqualified">
+
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Defines the elements used in the Spring Framework's support for task execution and scheduling.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+
+	<xsd:import namespace="http://www.springframework.org/schema/beans" schemaLocation="http://www.springframework.org/schema/beans/spring-beans-4.2.xsd"/>
+	<xsd:import namespace="http://www.springframework.org/schema/tool" schemaLocation="http://www.springframework.org/schema/tool/spring-tool-4.2.xsd"/>
+
+ <xsd:element name="annotation-driven">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Enables the detection of @Async and @Scheduled annotations on any Spring-managed
+ object. If present, a proxy will be generated for executing the annotated methods
+ asynchronously.
+
+ See Javadoc for the org.springframework.scheduling.annotation.EnableAsync and
+ org.springframework.scheduling.annotation.EnableScheduling annotations for information
+ on code-based alternatives to this XML element.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:attribute name="executor" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Specifies the java.util.Executor instance to use when invoking asynchronous methods.
+ If not provided, an instance of org.springframework.core.task.SimpleAsyncTaskExecutor
+ will be used by default.
+ Note that as of Spring 3.1.2, individual @Async methods may qualify which executor to
+ use, meaning that the executor specified here acts as a default for all non-qualified
+ @Async methods.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="exception-handler" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Specifies the org.springframework.aop.interceptor.AsyncUncaughtExceptionHandler
+ instance to use when an exception is thrown during an asynchronous method execution
+ and cannot be accessed by the caller. If not provided, an instance of
+ org.springframework.aop.interceptor.SimpleAsyncUncaughtExceptionHandler will be
+ used by default.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="scheduler" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Specifies the org.springframework.scheduling.TaskScheduler or
+ java.util.ScheduledExecutorService instance to use when invoking scheduled
+ methods. If no reference is provided, a TaskScheduler backed by a single
+ thread scheduled executor will be used.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="mode" default="proxy">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Should annotated beans be proxied using Spring's AOP framework,
+ or should they rather be weaved with an AspectJ async execution aspect?
+
+ AspectJ weaving requires spring-aspects.jar on the classpath,
+ as well as load-time weaving (or compile-time weaving) enabled.
+
+ Note: The weaving-based aspect requires the @Async annotation to be
+ defined on the concrete class. Annotations in interfaces will not work
+ in that case (they will rather only work with interface-based proxies)!
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:simpleType>
+ <xsd:restriction base="xsd:string">
+ <xsd:enumeration value="proxy"/>
+ <xsd:enumeration value="aspectj"/>
+ </xsd:restriction>
+ </xsd:simpleType>
+ </xsd:attribute>
+ <xsd:attribute name="proxy-target-class" type="xsd:boolean" default="false">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Are class-based (CGLIB) proxies to be created? By default, standard
+ Java interface-based proxies are created.
+
+ Note: Class-based proxies require the @Async annotation to be defined
+ on the concrete class. Annotations in interfaces will not work in
+ that case (they will rather only work with interface-based proxies)!
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="scheduler">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Defines a ThreadPoolTaskScheduler instance with configurable pool size. See Javadoc
+ for the org.springframework.scheduling.annotation.EnableScheduling annotation for
+ information on a code-based alternative to this XML element.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:attribute name="id" type="xsd:string" use="required">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The bean name for the generated ThreadPoolTaskScheduler instance.
+ It will also be used as the default thread name prefix.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="pool-size" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The size of the ScheduledExecutorService's thread pool. The default is 1.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="executor">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Defines a ThreadPoolTaskExecutor instance with configurable pool size,
+ queue-capacity, keep-alive, and rejection-policy values.
+
+ See Javadoc for the org.springframework.scheduling.annotation.EnableAsync annotation
+ for information on code-based alternatives to this XML element.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:attribute name="id" type="xsd:string" use="required">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The bean name for the generated ThreadPoolTaskExecutor instance.
+ This value will also be used as the thread name prefix which is why it is
+ required even when defining the executor as an inner bean: The executor
+ won't be directly accessible then but will nevertheless use the specified
+ id as the thread name prefix of the threads that it manages.
+ In the case of multiple task:executors, as of Spring 3.1.2 this value may be used to
+ qualify which executor should handle a given @Async method, e.g. @Async("executorId").
+ See the Javadoc for the #value attribute of Spring's @Async annotation for details.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="pool-size" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The size of the executor's thread pool as either a single value or a range
+ (e.g. 5-10). If no bounded queue-capacity value is provided, then a max value
+ has no effect unless the range is specified as 0-n. In that case, the core pool
+ will have a size of n, but the 'allowCoreThreadTimeout' flag will be set to true.
+ If a queue-capacity is provided, then the lower bound of a range will map to the
+ core size and the upper bound will map to the max size. If this attribute is not
+ provided, the default core size will be 1, and the default max size will be
+ Integer.MAX_VALUE (i.e. unbounded).
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="queue-capacity" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Queue capacity for the ThreadPoolTaskExecutor. If not specified, the default will
+ be Integer.MAX_VALUE (i.e. unbounded).
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="keep-alive" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Keep-alive time in seconds. Inactive threads that have been created beyond the
+ core size will timeout after the specified number of seconds elapse. If the
+ executor has an unbounded queue capacity and a size range represented as 0-n,
+				then the core threads will also be configured to time out when inactive.
+				Otherwise, core threads will never time out.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="rejection-policy" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The RejectedExecutionHandler type. When a bounded queue cannot accept any
+ additional tasks, this determines the behavior. While the default is ABORT,
+ consider using CALLER_RUNS to throttle inbound tasks. In other words, by forcing
+ the caller to run the task itself, it will not be able to provide another task
+ until after it completes the task at hand. In the meantime, one or more tasks
+ may be removed from the queue. Alternatively, if it is not critical to run every
+ task, consider using DISCARD to drop the current task or DISCARD_OLDEST to drop
+ the task at the head of the queue.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:simpleType>
+ <xsd:restriction base="xsd:string">
+ <xsd:enumeration value="ABORT"/>
+ <xsd:enumeration value="CALLER_RUNS"/>
+ <xsd:enumeration value="DISCARD"/>
+ <xsd:enumeration value="DISCARD_OLDEST"/>
+ </xsd:restriction>
+ </xsd:simpleType>
+ </xsd:attribute>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="scheduled-tasks">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Top-level element that contains one or more task sub-elements to be
+ managed by a given TaskScheduler.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="scheduled" type="scheduledTaskType" minOccurs="1" maxOccurs="unbounded"/>
+ </xsd:sequence>
+ <xsd:attribute name="scheduler" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Reference to an instance of TaskScheduler to manage the provided tasks. If not specified,
+ the default value will be a wrapper for a single-threaded Executor.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation kind="ref">
+ <tool:expected-type type="org.springframework.scheduling.TaskScheduler"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:complexType name="scheduledTaskType">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Element defining a scheduled method-invoking task and its corresponding trigger.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:attribute name="cron" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ A cron-based trigger. See the org.springframework.scheduling.support.CronSequenceGenerator
+				Javadoc for example patterns.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="fixed-delay" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ An interval-based trigger where the interval is measured from the completion time of the
+ previous task. The time unit value is measured in milliseconds.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="fixed-rate" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ An interval-based trigger where the interval is measured from the start time of the
+ previous task. The time unit value is measured in milliseconds.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="trigger" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ A reference to a bean that implements the Trigger interface.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="initial-delay" type="xsd:string" use="optional">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Number of milliseconds to delay before the first execution of a 'fixed-rate' or
+ 'fixed-delay' task.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="ref" type="xsd:string" use="required">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Reference to an object that provides a method to be invoked.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation kind="ref" />
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="method" type="xsd:string" use="required">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The name of the method to be invoked.
+ ]]></xsd:documentation>
+ <xsd:appinfo>
+ <tool:annotation>
+ <tool:expected-method type-ref="@ref"/>
+ </tool:annotation>
+ </xsd:appinfo>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:complexType>
+
+</xsd:schema>
diff --git a/spring-context/src/main/resources/org/springframework/scripting/config/spring-lang-4.2.xsd b/spring-context/src/main/resources/org/springframework/scripting/config/spring-lang-4.2.xsd
new file mode 100644
index 000000000000..061d7c1bada0
--- /dev/null
+++ b/spring-context/src/main/resources/org/springframework/scripting/config/spring-lang-4.2.xsd
@@ -0,0 +1,255 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+
+<xsd:schema xmlns="http://www.springframework.org/schema/lang"
+ xmlns:xsd="http://www.w3.org/2001/XMLSchema"
+ xmlns:beans="http://www.springframework.org/schema/beans"
+ targetNamespace="http://www.springframework.org/schema/lang"
+ elementFormDefault="qualified"
+ attributeFormDefault="unqualified">
+
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Defines the elements used in the Spring Framework's dynamic language
+ support, which allows bean definitions that are backed by classes
+ written in a language other than Java.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+
+ <xsd:import namespace="http://www.springframework.org/schema/beans" schemaLocation="http://www.springframework.org/schema/beans/spring-beans-4.2.xsd"/>
+ <xsd:import namespace="http://www.springframework.org/schema/tool" schemaLocation="http://www.springframework.org/schema/tool/spring-tool-4.2.xsd"/>
+
+ <xsd:element name="defaults">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Default settings for any scripted beans registered within this context.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:attributeGroup ref="defaultableAttributes"/>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="groovy">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ A Spring bean backed by a Groovy class definition.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="customizableScriptType">
+ <xsd:attributeGroup ref="defaultableAttributes"/>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="jruby">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ A Spring bean backed by a JRuby class definition.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="dynamicScriptType">
+ <xsd:attributeGroup ref="vanillaScriptAttributes"/>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="bsh">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ A Spring bean backed by a BeanShell script.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="dynamicScriptType">
+ <xsd:attributeGroup ref="vanillaScriptAttributes"/>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+
+ <xsd:element name="std">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ A Spring bean backed by a standard JSR-223 based script.
+ Supports JavaScript, Groovy, JRuby and other JSR-223 compliant engines.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:complexType>
+ <xsd:complexContent>
+ <xsd:extension base="dynamicScriptType">
+ <xsd:attribute name="engine" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The name of the script engine (if not inferred from the file extension).
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attributeGroup ref="vanillaScriptAttributes"/>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+ </xsd:element>
+
+ <!-- Script Types -->
+ <xsd:complexType name="simpleScriptType">
+ <xsd:complexContent>
+ <xsd:extension base="beans:identifiedType">
+ <xsd:sequence>
+ <xsd:element name="inline-script" minOccurs="0" maxOccurs="1">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The source code for the dynamic language-backed bean.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+ <xsd:element name="property" type="beans:propertyType" minOccurs="0" maxOccurs="unbounded">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Dynamic language-backed bean definitions can have zero or more properties.
+ Property elements correspond to JavaBean setter methods exposed
+ by the bean classes. Spring supports primitives, references to other
+ beans in the same or related factories, lists, maps and properties.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:element>
+ </xsd:sequence>
+ <xsd:attribute name="script-source" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation source="java:org.springframework.core.io.Resource"><![CDATA[
+ The resource containing the script for the dynamic language-backed bean.
+
+ Examples might be '/WEB-INF/scripts/Anais.groovy', 'classpath:Nin.bsh', etc.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="name" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The name of this scripted bean as an alias or replacement for the id.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="scope" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The scope of this scripted bean: typically "singleton" (one shared instance,
+ which will be returned by all calls to getBean with the given id), or
+ "prototype" (independent instance resulting from each call to getBean).
+ Default is "singleton".
+
+ Singletons are most commonly used, and are ideal for multi-threaded
+ service objects. Further scopes, such as "request" or "session", might
+ be supported by extended bean factories (e.g. in a web environment).
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="autowire" default="default">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The autowire mode for the scripted bean.
+ Analogous to the 'autowire' attribute on a standard bean definition.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ <xsd:simpleType>
+ <xsd:restriction base="xsd:NMTOKEN">
+ <xsd:enumeration value="default"/>
+ <xsd:enumeration value="no"/>
+ <xsd:enumeration value="byName"/>
+ <xsd:enumeration value="byType"/>
+ </xsd:restriction>
+ </xsd:simpleType>
+ </xsd:attribute>
+ <xsd:attribute name="depends-on" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The names of the beans that this bean depends on being initialized.
+ The bean factory will guarantee that these beans get initialized
+ before this bean.
+
+ Note that dependencies are normally expressed through bean properties.
+ This property should just be necessary for other kinds of dependencies
+ like statics (*ugh*) or database preparation on startup.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="init-method" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The name of an initialization method defined on the scripted bean.
+ Analogous to the 'init-method' attribute on a standard bean definition.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attribute name="destroy-method" type="xsd:string">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The name of a destruction method defined on the scripted bean.
+ Analogous to the 'destroy-method' attribute on a standard bean definition.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+
+ <xsd:complexType name="dynamicScriptType">
+ <xsd:complexContent>
+ <xsd:extension base="simpleScriptType">
+ <xsd:attribute name="script-interfaces">
+ <xsd:annotation>
+ <xsd:documentation source="java:java.lang.Class"><![CDATA[
+ The Java interfaces that the dynamic language-backed object is to expose; comma-delimited.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+
+ <xsd:complexType name="customizableScriptType">
+ <xsd:complexContent>
+ <xsd:extension base="simpleScriptType">
+ <xsd:attribute name="customizer-ref">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Reference to a GroovyObjectCustomizer or similar customizer bean.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:extension>
+ </xsd:complexContent>
+ </xsd:complexType>
+
+ <xsd:attributeGroup name="vanillaScriptAttributes">
+ <xsd:attribute name="refresh-check-delay" type="xsd:long">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ The delay (in milliseconds) between checks for updated sources when
+ using the refreshable beans feature.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ </xsd:attributeGroup>
+
+ <xsd:attributeGroup name="defaultableAttributes">
+ <xsd:attribute name="proxy-target-class" type="xsd:boolean">
+ <xsd:annotation>
+ <xsd:documentation><![CDATA[
+ Flag to tell the bean factory that if this bean is proxied it should be done using the target class type,
+ not its interfaces. A refreshable script is normally proxied, so often this is useful in conjunction with
+ refresh-check-delay. Defaults to false requiring no additional library dependencies, but hiding behavior
+ in the bean that is not defined in an interface.
+ ]]></xsd:documentation>
+ </xsd:annotation>
+ </xsd:attribute>
+ <xsd:attributeGroup ref="vanillaScriptAttributes"></xsd:attributeGroup>
+ </xsd:attributeGroup>
+
+</xsd:schema>
diff --git a/spring-context/src/test/java/org/springframework/scripting/groovy/GroovyScriptFactoryTests.java b/spring-context/src/test/java/org/springframework/scripting/groovy/GroovyScriptFactoryTests.java
index 21044dd9a5c7..1f5131a51c50 100644
--- a/spring-context/src/test/java/org/springframework/scripting/groovy/GroovyScriptFactoryTests.java
+++ b/spring-context/src/test/java/org/springframework/scripting/groovy/GroovyScriptFactoryTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2013 the original author or authors.
+ * Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -90,6 +90,35 @@ public void testStaticScript() throws Exception {
assertTrue(ctx.getBeansOfType(Messenger.class).values().contains(messenger));
}
+ @Test
+ public void testStaticScriptUsingJsr223() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("groovyContextWithJsr223.xml", getClass());
+
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Calculator.class)).contains("calculator"));
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Messenger.class)).contains("messenger"));
+
+ Calculator calc = (Calculator) ctx.getBean("calculator");
+ Messenger messenger = (Messenger) ctx.getBean("messenger");
+
+ assertFalse("Shouldn't get proxy when refresh is disabled", AopUtils.isAopProxy(calc));
+ assertFalse("Shouldn't get proxy when refresh is disabled", AopUtils.isAopProxy(messenger));
+
+ assertFalse("Scripted object should not be instance of Refreshable", calc instanceof Refreshable);
+ assertFalse("Scripted object should not be instance of Refreshable", messenger instanceof Refreshable);
+
+ assertEquals(calc, calc);
+ assertEquals(messenger, messenger);
+ assertTrue(!messenger.equals(calc));
+ assertTrue(messenger.hashCode() != calc.hashCode());
+ assertTrue(!messenger.toString().equals(calc.toString()));
+
+ String desiredMessage = "Hello World!";
+ assertEquals("Message is incorrect", desiredMessage, messenger.getMessage());
+
+ assertTrue(ctx.getBeansOfType(Calculator.class).values().contains(calc));
+ assertTrue(ctx.getBeansOfType(Messenger.class).values().contains(messenger));
+ }
+
@Test
public void testStaticPrototypeScript() throws Exception {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovyContext.xml", getClass());
@@ -109,6 +138,25 @@ public void testStaticPrototypeScript() throws Exception {
assertEquals("Byebye World!", messenger2.getMessage());
}
+ @Test
+ public void testStaticPrototypeScriptUsingJsr223() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("groovyContextWithJsr223.xml", getClass());
+ ConfigurableMessenger messenger = (ConfigurableMessenger) ctx.getBean("messengerPrototype");
+ ConfigurableMessenger messenger2 = (ConfigurableMessenger) ctx.getBean("messengerPrototype");
+
+ assertFalse("Shouldn't get proxy when refresh is disabled", AopUtils.isAopProxy(messenger));
+ assertFalse("Scripted object should not be instance of Refreshable", messenger instanceof Refreshable);
+
+ assertNotSame(messenger, messenger2);
+ assertSame(messenger.getClass(), messenger2.getClass());
+ assertEquals("Hello World!", messenger.getMessage());
+ assertEquals("Hello World!", messenger2.getMessage());
+ messenger.setMessage("Bye World!");
+ messenger2.setMessage("Byebye World!");
+ assertEquals("Bye World!", messenger.getMessage());
+ assertEquals("Byebye World!", messenger2.getMessage());
+ }
+
@Test
public void testStaticScriptWithInstance() throws Exception {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovyContext.xml", getClass());
@@ -123,6 +171,20 @@ public void testStaticScriptWithInstance() throws Exception {
assertTrue(ctx.getBeansOfType(Messenger.class).values().contains(messenger));
}
+ @Test
+ public void testStaticScriptWithInstanceUsingJsr223() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("groovyContextWithJsr223.xml", getClass());
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Messenger.class)).contains("messengerInstance"));
+ Messenger messenger = (Messenger) ctx.getBean("messengerInstance");
+
+ assertFalse("Shouldn't get proxy when refresh is disabled", AopUtils.isAopProxy(messenger));
+ assertFalse("Scripted object should not be instance of Refreshable", messenger instanceof Refreshable);
+
+ String desiredMessage = "Hello World!";
+ assertEquals("Message is incorrect", desiredMessage, messenger.getMessage());
+ assertTrue(ctx.getBeansOfType(Messenger.class).values().contains(messenger));
+ }
+
@Test
public void testStaticScriptWithInlineDefinedInstance() throws Exception {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovyContext.xml", getClass());
@@ -137,6 +199,20 @@ public void testStaticScriptWithInlineDefinedInstance() throws Exception {
assertTrue(ctx.getBeansOfType(Messenger.class).values().contains(messenger));
}
+ @Test
+ public void testStaticScriptWithInlineDefinedInstanceUsingJsr223() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("groovyContextWithJsr223.xml", getClass());
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Messenger.class)).contains("messengerInstanceInline"));
+ Messenger messenger = (Messenger) ctx.getBean("messengerInstanceInline");
+
+ assertFalse("Shouldn't get proxy when refresh is disabled", AopUtils.isAopProxy(messenger));
+ assertFalse("Scripted object should not be instance of Refreshable", messenger instanceof Refreshable);
+
+ String desiredMessage = "Hello World!";
+ assertEquals("Message is incorrect", desiredMessage, messenger.getMessage());
+ assertTrue(ctx.getBeansOfType(Messenger.class).values().contains(messenger));
+ }
+
@Test
public void testNonStaticScript() throws Exception {
ApplicationContext ctx = new ClassPathXmlApplicationContext("groovyRefreshableContext.xml", getClass());
@@ -311,8 +387,6 @@ public void testGetScriptedObjectDoesChokeOnNullScriptSourceBeingPassedIn() thro
}
}
- @Ignore
- // see http://build.springframework.org/browse/SPR-TRUNKQUICK-908
@Test
public void testResourceScriptFromTag() throws Exception {
ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd.xml", getClass());
@@ -409,6 +483,49 @@ public void testAnonymousScriptDetected() throws Exception {
assertEquals(4, beans.size());
}
+ @Test
+ public void testJsr223FromTag() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd-jsr223.xml", getClass());
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Messenger.class)).contains("messenger"));
+ Messenger messenger = (Messenger) ctx.getBean("messenger");
+ assertFalse(AopUtils.isAopProxy(messenger));
+ assertEquals("Hello World!", messenger.getMessage());
+ }
+
+ @Test
+ public void testJsr223FromTagWithInterface() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd-jsr223.xml", getClass());
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Messenger.class)).contains("messengerWithInterface"));
+ Messenger messenger = (Messenger) ctx.getBean("messengerWithInterface");
+ assertFalse(AopUtils.isAopProxy(messenger));
+ }
+
+ @Test
+ public void testRefreshableJsr223FromTag() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd-jsr223.xml", getClass());
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Messenger.class)).contains("refreshableMessenger"));
+ Messenger messenger = (Messenger) ctx.getBean("refreshableMessenger");
+ assertTrue(AopUtils.isAopProxy(messenger));
+ assertTrue(messenger instanceof Refreshable);
+ assertEquals("Hello World!", messenger.getMessage());
+ }
+
+ @Test
+ public void testInlineJsr223FromTag() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd-jsr223.xml", getClass());
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Messenger.class)).contains("inlineMessenger"));
+ Messenger messenger = (Messenger) ctx.getBean("inlineMessenger");
+ assertFalse(AopUtils.isAopProxy(messenger));
+ }
+
+ @Test
+ public void testInlineJsr223FromTagWithInterface() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("groovy-with-xsd-jsr223.xml", getClass());
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Messenger.class)).contains("inlineMessengerWithInterface"));
+ Messenger messenger = (Messenger) ctx.getBean("inlineMessengerWithInterface");
+ assertFalse(AopUtils.isAopProxy(messenger));
+ }
+
/**
* Tests the SPR-2098 bug whereby no more than 1 property element could be
* passed to a scripted bean :(
diff --git a/spring-context/src/test/java/org/springframework/scripting/jruby/JRubyScriptFactoryTests.java b/spring-context/src/test/java/org/springframework/scripting/jruby/JRubyScriptFactoryTests.java
index 0c45266dbf82..95c6de6f4872 100644
--- a/spring-context/src/test/java/org/springframework/scripting/jruby/JRubyScriptFactoryTests.java
+++ b/spring-context/src/test/java/org/springframework/scripting/jruby/JRubyScriptFactoryTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2013 the original author or authors.
+ * Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -64,6 +64,27 @@ public void testStaticScript() throws Exception {
assertNotSame(messenger.hashCode(), calc.hashCode());
assertTrue(!messenger.toString().equals(calc.toString()));
+ assertEquals(3, calc.add(1, 2));
+ String desiredMessage = "Hello World!";
+ assertEquals("Message is incorrect", desiredMessage, messenger.getMessage());
+ }
+
+ @Test
+ public void testStaticScriptUsingJsr223() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("jrubyContextWithJsr223.xml", getClass());
+ Calculator calc = (Calculator) ctx.getBean("calculator");
+ Messenger messenger = (Messenger) ctx.getBean("messenger");
+
+ assertFalse("Scripted object should not be instance of Refreshable", calc instanceof Refreshable);
+ assertFalse("Scripted object should not be instance of Refreshable", messenger instanceof Refreshable);
+
+ assertEquals(calc, calc);
+ assertEquals(messenger, messenger);
+ assertTrue(!messenger.equals(calc));
+ assertNotSame(messenger.hashCode(), calc.hashCode());
+ assertTrue(!messenger.toString().equals(calc.toString()));
+
+ assertEquals(3, calc.add(1, 2));
String desiredMessage = "Hello World!";
assertEquals("Message is incorrect", desiredMessage, messenger.getMessage());
}
@@ -163,6 +184,15 @@ public void testResourceScriptFromTag() throws Exception {
assertEquals(testBean, messengerByName.getTestBean());
}
+ @Test
+ public void testResourceScriptFromTagUsingJsr223() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("jruby-with-xsd-jsr223.xml", getClass());
+
+ Messenger messenger = (Messenger) ctx.getBean("messenger");
+ assertEquals("Hello World!", messenger.getMessage());
+ assertFalse(messenger instanceof Refreshable);
+ }
+
@Test
public void testPrototypeScriptFromTag() throws Exception {
ApplicationContext ctx = new ClassPathXmlApplicationContext("jruby-with-xsd.xml", getClass());
@@ -185,6 +215,16 @@ public void testInlineScriptFromTag() throws Exception {
Calculator calculator = (Calculator) ctx.getBean("calculator");
assertNotNull(calculator);
assertFalse(calculator instanceof Refreshable);
+ assertEquals(3, calculator.add(1, 2));
+ }
+
+ @Test
+ public void testInlineScriptFromTagUsingJsr223() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("jruby-with-xsd-jsr223.xml", getClass());
+ Calculator calculator = (Calculator) ctx.getBean("calculator");
+ assertNotNull(calculator);
+ assertFalse(calculator instanceof Refreshable);
+ assertEquals(3, calculator.add(1, 2));
}
@Test
@@ -195,6 +235,15 @@ public void testRefreshableFromTag() throws Exception {
assertTrue("Messenger should be Refreshable", messenger instanceof Refreshable);
}
+ @Test
+ public void testRefreshableFromTagUsingJsr223() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("jruby-with-xsd-jsr223.xml", getClass());
+ Messenger messenger = (Messenger) ctx.getBean("refreshableMessenger");
+ assertEquals("Hello World!", messenger.getMessage());
+ assertTrue("Messenger should be Refreshable", messenger instanceof Refreshable);
+ }
+
+ @Test
public void testThatMultipleScriptInterfacesAreSupported() throws Exception {
ApplicationContext ctx = new ClassPathXmlApplicationContext("jruby-with-xsd.xml", getClass());
Messenger messenger = (Messenger) ctx.getBean("calculatingMessenger");
@@ -214,6 +263,15 @@ public void testWithComplexArg() throws Exception {
assertEquals(1, printable.count);
}
+ @Test
+ public void testWithComplexArgUsingJsr223() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("jrubyContextWithJsr223.xml", getClass());
+ Printer printer = (Printer) ctx.getBean("printer");
+ CountingPrintable printable = new CountingPrintable();
+ printer.print(printable);
+ assertEquals(1, printable.count);
+ }
+
@Test
public void testWithPrimitiveArgsInReturnTypeAndParameters() throws Exception {
ApplicationContext ctx = new ClassPathXmlApplicationContext("jrubyContextForPrimitives.xml", getClass());
diff --git a/spring-context/src/test/java/org/springframework/scripting/support/StandardScriptFactoryTests.java b/spring-context/src/test/java/org/springframework/scripting/support/StandardScriptFactoryTests.java
new file mode 100644
index 000000000000..fdd33ad84240
--- /dev/null
+++ b/spring-context/src/test/java/org/springframework/scripting/support/StandardScriptFactoryTests.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2002-2015 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.scripting.support;
+
+import java.util.Arrays;
+
+import org.junit.Test;
+
+import org.springframework.aop.support.AopUtils;
+import org.springframework.aop.target.dynamic.Refreshable;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.support.ClassPathXmlApplicationContext;
+import org.springframework.scripting.Messenger;
+
+import static org.junit.Assert.*;
+
+/**
+ * {@link StandardScriptFactory} (lang:std) tests for JavaScript.
+ *
+ * @author Juergen Hoeller
+ * @since 4.2
+ */
+public class StandardScriptFactoryTests {
+
+ @Test
+ public void testJsr223FromTagWithInterface() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("jsr223-with-xsd.xml", getClass());
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Messenger.class)).contains("messengerWithInterface"));
+ Messenger messenger = (Messenger) ctx.getBean("messengerWithInterface");
+ assertFalse(AopUtils.isAopProxy(messenger));
+ assertEquals("Hello World!", messenger.getMessage());
+ }
+
+ @Test
+ public void testRefreshableJsr223FromTagWithInterface() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("jsr223-with-xsd.xml", getClass());
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Messenger.class)).contains("refreshableMessengerWithInterface"));
+ Messenger messenger = (Messenger) ctx.getBean("refreshableMessengerWithInterface");
+ assertTrue(AopUtils.isAopProxy(messenger));
+ assertTrue(messenger instanceof Refreshable);
+ assertEquals("Hello World!", messenger.getMessage());
+ }
+
+ @Test
+ public void testInlineJsr223FromTagWithInterface() throws Exception {
+ ApplicationContext ctx = new ClassPathXmlApplicationContext("jsr223-with-xsd.xml", getClass());
+ assertTrue(Arrays.asList(ctx.getBeanNamesForType(Messenger.class)).contains("inlineMessengerWithInterface"));
+ Messenger messenger = (Messenger) ctx.getBean("inlineMessengerWithInterface");
+ assertFalse(AopUtils.isAopProxy(messenger));
+ assertEquals("Hello World!", messenger.getMessage());
+ }
+
+}
diff --git a/spring-context/src/test/resources/org/springframework/scripting/groovy/groovy-with-xsd-jsr223.xml b/spring-context/src/test/resources/org/springframework/scripting/groovy/groovy-with-xsd-jsr223.xml
new file mode 100644
index 000000000000..5d7f2739900a
--- /dev/null
+++ b/spring-context/src/test/resources/org/springframework/scripting/groovy/groovy-with-xsd-jsr223.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:lang="http://www.springframework.org/schema/lang"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
+ http://www.springframework.org/schema/lang http://www.springframework.org/schema/lang/spring-lang-4.2.xsd">
+
+ <lang:std id="messenger" script-source="classpath:org/springframework/scripting/groovy/Messenger.groovy">
+ <lang:property name="message" value="Hello World!"/>
+ </lang:std>
+
+ <lang:std id="messengerWithInterface" script-source="classpath:org/springframework/scripting/groovy/Messenger.groovy"
+ script-interfaces="org.springframework.scripting.Messenger"/>
+
+ <lang:std id="refreshableMessenger" refresh-check-delay="5000"
+ script-source="classpath:org/springframework/scripting/groovy/Messenger.groovy">
+ <lang:property name="message" value="Hello World!"/>
+ </lang:std>
+
+ <lang:std id="inlineMessenger" engine="Groovy">
+ <lang:inline-script>
+ package org.springframework.scripting.groovy;
+ import org.springframework.scripting.Messenger
+ class GroovyMessenger implements Messenger {
+ def String message;
+ }
+ return new GroovyMessenger();
+ </lang:inline-script>
+ </lang:std>
+
+ <lang:std id="inlineMessengerWithInterface" engine="Groovy" script-interfaces="org.springframework.scripting.Messenger">
+ <lang:inline-script>
+ package org.springframework.scripting.groovy;
+ import org.springframework.scripting.Messenger
+ class GroovyMessenger implements Messenger {
+ def String message;
+ }
+ return new GroovyMessenger();
+ </lang:inline-script>
+ </lang:std>
+
+</beans>
diff --git a/spring-context/src/test/resources/org/springframework/scripting/groovy/groovy-with-xsd-proxy-target-class.xml b/spring-context/src/test/resources/org/springframework/scripting/groovy/groovy-with-xsd-proxy-target-class.xml
index 822ad89a0662..44a43339403a 100644
--- a/spring-context/src/test/resources/org/springframework/scripting/groovy/groovy-with-xsd-proxy-target-class.xml
+++ b/spring-context/src/test/resources/org/springframework/scripting/groovy/groovy-with-xsd-proxy-target-class.xml
@@ -3,12 +3,11 @@
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:lang="http://www.springframework.org/schema/lang"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
-
http://www.springframework.org/schema/lang http://www.springframework.org/schema/lang/spring-lang-3.1.xsd">
<lang:groovy id="refreshableMessenger" refresh-check-delay="5000" proxy-target-class="true"
- script-source="classpath:org/springframework/scripting/groovy/Messenger.groovy">
- <lang:property name="message" value="Hello World!" />
+ script-source="classpath:org/springframework/scripting/groovy/Messenger.groovy">
+ <lang:property name="message" value="Hello World!"/>
</lang:groovy>
</beans>
diff --git a/spring-context/src/test/resources/org/springframework/scripting/groovy/groovy-with-xsd.xml b/spring-context/src/test/resources/org/springframework/scripting/groovy/groovy-with-xsd.xml
index 8de3522ab662..a573d6a2e093 100644
--- a/spring-context/src/test/resources/org/springframework/scripting/groovy/groovy-with-xsd.xml
+++ b/spring-context/src/test/resources/org/springframework/scripting/groovy/groovy-with-xsd.xml
@@ -33,7 +33,7 @@ class GroovyCalculator implements Calculator {
return x + y;
}
}
- </lang:inline-script>
+ </lang:inline-script>
</lang:groovy>
<lang:groovy id="customizer">
diff --git a/spring-context/src/test/resources/org/springframework/scripting/groovy/groovyContext.xml b/spring-context/src/test/resources/org/springframework/scripting/groovy/groovyContext.xml
index a159b1791e71..e9c674b9592b 100644
--- a/spring-context/src/test/resources/org/springframework/scripting/groovy/groovyContext.xml
+++ b/spring-context/src/test/resources/org/springframework/scripting/groovy/groovyContext.xml
@@ -10,8 +10,7 @@
<bean class="org.springframework.scripting.support.ScriptFactoryPostProcessor"/>
- <bean id="calculator"
- class="org.springframework.scripting.groovy.GroovyScriptFactory">
+ <bean id="calculator" class="org.springframework.scripting.groovy.GroovyScriptFactory">
<constructor-arg>
<value>inline:
package org.springframework.scripting.groovy;
@@ -25,28 +24,24 @@ class GroovyCalculator implements Calculator {
</constructor-arg>
</bean>
- <bean id="messenger"
- class="org.springframework.scripting.groovy.GroovyScriptFactory">
+ <bean id="messenger" class="org.springframework.scripting.groovy.GroovyScriptFactory">
<constructor-arg value="classpath:org/springframework/scripting/groovy/Messenger.groovy"/>
<property name="message" value="Hello World!"/>
</bean>
- <bean id="messengerPrototype"
- class="org.springframework.scripting.groovy.GroovyScriptFactory"
+ <bean id="messengerPrototype" class="org.springframework.scripting.groovy.GroovyScriptFactory"
scope="prototype">
<constructor-arg value="classpath:org/springframework/scripting/groovy/Messenger.groovy"/>
<property name="message" value="Hello World!"/>
</bean>
- <bean id="messengerInstance"
- class="org.springframework.scripting.groovy.GroovyScriptFactory">
+ <bean id="messengerInstance" class="org.springframework.scripting.groovy.GroovyScriptFactory">
<constructor-arg value="classpath:org/springframework/scripting/groovy/MessengerInstance.groovy"/>
<property name="message" ref="myMessage"/>
</bean>
- <bean id="messengerInstanceInline"
- class="org.springframework.scripting.groovy.GroovyScriptFactory">
- <constructor-arg>
+ <bean id="messengerInstanceInline" class="org.springframework.scripting.groovy.GroovyScriptFactory">
+ <constructor-arg>
<value>inline:
package org.springframework.scripting.groovy;
import org.springframework.scripting.Messenger
diff --git a/spring-context/src/test/resources/org/springframework/scripting/groovy/groovyContextWithJsr223.xml b/spring-context/src/test/resources/org/springframework/scripting/groovy/groovyContextWithJsr223.xml
new file mode 100644
index 000000000000..c5f69cd7eaa7
--- /dev/null
+++ b/spring-context/src/test/resources/org/springframework/scripting/groovy/groovyContextWithJsr223.xml
@@ -0,0 +1,61 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:lang="http://www.springframework.org/schema/lang"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans
+ http://www.springframework.org/schema/beans/spring-beans.xsd"
+ default-lazy-init="true">
+
+ <bean class="org.springframework.scripting.support.ScriptFactoryPostProcessor"/>
+
+ <bean id="calculator" class="org.springframework.scripting.support.StandardScriptFactory">
+ <constructor-arg value="Groovy"/>
+ <constructor-arg>
+ <value>inline:
+package org.springframework.scripting.groovy;
+import org.springframework.scripting.Calculator
+class GroovyCalculator implements Calculator {
+ int add(int x, int y) {
+ return x + y;
+ }
+}
+ </value>
+ </constructor-arg>
+ </bean>
+
+ <bean id="messenger" class="org.springframework.scripting.support.StandardScriptFactory">
+ <constructor-arg value="classpath:org/springframework/scripting/groovy/Messenger.groovy"/>
+ <property name="message" value="Hello World!"/>
+ </bean>
+
+ <bean id="messengerPrototype" class="org.springframework.scripting.support.StandardScriptFactory"
+ scope="prototype">
+ <constructor-arg value="classpath:org/springframework/scripting/groovy/Messenger.groovy"/>
+ <property name="message" value="Hello World!"/>
+ </bean>
+
+ <bean id="messengerInstance" class="org.springframework.scripting.support.StandardScriptFactory">
+ <constructor-arg value="classpath:org/springframework/scripting/groovy/MessengerInstance.groovy"/>
+ <property name="message" ref="myMessage"/>
+ </bean>
+
+ <bean id="messengerInstanceInline" class="org.springframework.scripting.support.StandardScriptFactory">
+ <constructor-arg value="Groovy"/>
+ <constructor-arg>
+ <value>inline:
+package org.springframework.scripting.groovy;
+import org.springframework.scripting.Messenger
+class GroovyMessenger implements Messenger {
+ def String message;
+}
+return new GroovyMessenger();
+ </value>
+ </constructor-arg>
+ <property name="message" ref="myMessage"/>
+ </bean>
+
+ <bean id="myMessage" class="java.lang.String">
+ <constructor-arg value="Hello World!"/>
+ </bean>
+
+</beans>
diff --git a/spring-context/src/test/resources/org/springframework/scripting/jruby/MessengerWithInstance.rb b/spring-context/src/test/resources/org/springframework/scripting/jruby/MessengerWithInstance.rb
new file mode 100644
index 000000000000..3bdb75e3c9f6
--- /dev/null
+++ b/spring-context/src/test/resources/org/springframework/scripting/jruby/MessengerWithInstance.rb
@@ -0,0 +1,25 @@
+require 'java'
+
+class RubyMessenger
+ include org.springframework.scripting.ConfigurableMessenger
+
+ @@message = "Hello World!"
+
+ def setMessage(message)
+ @@message = message
+ end
+
+ def getMessage
+ @@message
+ end
+
+ def setTestBean(testBean)
+ @@testBean = testBean
+ end
+
+ def getTestBean
+ @@testBean
+ end
+end
+
+RubyMessenger.new
diff --git a/spring-context/src/test/resources/org/springframework/scripting/jruby/PrinterWithInstance.rb b/spring-context/src/test/resources/org/springframework/scripting/jruby/PrinterWithInstance.rb
new file mode 100644
index 000000000000..58f8a7b5123d
--- /dev/null
+++ b/spring-context/src/test/resources/org/springframework/scripting/jruby/PrinterWithInstance.rb
@@ -0,0 +1,11 @@
+require 'java'
+
+class RubyPrinter
+ include org.springframework.scripting.jruby.Printer
+
+ def print(obj)
+ puts obj.getContent
+ end
+end
+
+RubyPrinter.new
diff --git a/spring-context/src/test/resources/org/springframework/scripting/jruby/jruby-with-xsd-jsr223.xml b/spring-context/src/test/resources/org/springframework/scripting/jruby/jruby-with-xsd-jsr223.xml
new file mode 100644
index 000000000000..c07e66dc45ac
--- /dev/null
+++ b/spring-context/src/test/resources/org/springframework/scripting/jruby/jruby-with-xsd-jsr223.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:lang="http://www.springframework.org/schema/lang"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd
+ http://www.springframework.org/schema/lang http://www.springframework.org/schema/lang/spring-lang-4.2.xsd">
+
+ <lang:std id="messenger"
+ script-source="classpath:org/springframework/scripting/jruby/MessengerWithInstance.rb">
+ </lang:std>
+
+ <lang:std id="calculator" engine="jruby">
+ <lang:inline-script>
+ require 'java'
+
+ class RubyCalculator
+ include org.springframework.scripting.Calculator
+
+ def add(x, y)
+ x + y
+ end
+ end
+
+ RubyCalculator.new
+ </lang:inline-script>
+ </lang:std>
+
+ <lang:std id="refreshableMessenger"
+ script-source="classpath:org/springframework/scripting/jruby/MessengerWithInstance.rb"
+ refresh-check-delay="5000">
+ </lang:std>
+
+</beans>
\ No newline at end of file
diff --git a/spring-context/src/test/resources/org/springframework/scripting/jruby/jrubyContextWithJsr223.xml b/spring-context/src/test/resources/org/springframework/scripting/jruby/jrubyContextWithJsr223.xml
new file mode 100644
index 000000000000..f552bc90c29c
--- /dev/null
+++ b/spring-context/src/test/resources/org/springframework/scripting/jruby/jrubyContextWithJsr223.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE beans PUBLIC "-//SPRING//DTD BEAN 2.0//EN" "http://www.springframework.org/dtd/spring-beans-2.0.dtd">
+
+<beans>
+
+ <bean class="org.springframework.scripting.support.ScriptFactoryPostProcessor"/>
+
+ <bean id="calculator" class="org.springframework.scripting.support.StandardScriptFactory">
+ <constructor-arg value="jruby"/>
+ <constructor-arg>
+ <value>inline:
+require 'java'
+
+class RubyCalculator
+ include org.springframework.scripting.Calculator
+
+ def add(x, y)
+ x + y
+ end
+end
+
+RubyCalculator.new
+ </value>
+ </constructor-arg>
+ <constructor-arg value="org.springframework.scripting.Calculator"/>
+ </bean>
+
+ <bean id="messenger" class="org.springframework.scripting.support.StandardScriptFactory">
+ <constructor-arg value="jruby"/>
+ <constructor-arg value="classpath:org/springframework/scripting/jruby/MessengerWithInstance.rb"/>
+ <constructor-arg value="org.springframework.scripting.Messenger"/>
+ </bean>
+
+ <bean id="printer" class="org.springframework.scripting.support.StandardScriptFactory">
+ <constructor-arg value="jruby"/>
+ <constructor-arg value="classpath:org/springframework/scripting/jruby/PrinterWithInstance.rb"/>
+ <constructor-arg value="org.springframework.scripting.jruby.Printer"/>
+ </bean>
+
+</beans>
diff --git a/spring-context/src/test/resources/org/springframework/scripting/support/Messenger.js b/spring-context/src/test/resources/org/springframework/scripting/support/Messenger.js
new file mode 100644
index 000000000000..5277c3c73aeb
--- /dev/null
+++ b/spring-context/src/test/resources/org/springframework/scripting/support/Messenger.js
@@ -0,0 +1 @@
+function getMessage() { return "Hello World!" }
diff --git a/spring-context/src/test/resources/org/springframework/scripting/support/jsr223-with-xsd.xml b/spring-context/src/test/resources/org/springframework/scripting/support/jsr223-with-xsd.xml
new file mode 100644
index 000000000000..b88c93bcabcc
--- /dev/null
+++ b/spring-context/src/test/resources/org/springframework/scripting/support/jsr223-with-xsd.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:lang="http://www.springframework.org/schema/lang"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
+ http://www.springframework.org/schema/lang http://www.springframework.org/schema/lang/spring-lang-4.2.xsd">
+
+ <lang:std id="messengerWithInterface" script-source="classpath:org/springframework/scripting/support/Messenger.js"
+ script-interfaces="org.springframework.scripting.Messenger"/>
+
+ <lang:std id="refreshableMessengerWithInterface" refresh-check-delay="5000"
+ script-source="classpath:org/springframework/scripting/support/Messenger.js"
+ script-interfaces="org.springframework.scripting.Messenger">
+ </lang:std>
+
+ <lang:std id="inlineMessengerWithInterface" engine="JavaScript"
+ script-interfaces="org.springframework.scripting.Messenger">
+ <lang:inline-script>
+ function getMessage() { return "Hello World!" }
+ </lang:inline-script>
+ </lang:std>
+
+</beans>
|
935b63527dacf2c380a53ce83ca5b1d06aadca75
|
ReactiveX-RxJava
|
Baseline Performance Tests--Start of suite of general performance tests for comparing overall changes.-
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/perf/java/rx/usecases/PerfBaseline.java b/rxjava-core/src/perf/java/rx/usecases/PerfBaseline.java
new file mode 100644
index 0000000000..c349253f84
--- /dev/null
+++ b/rxjava-core/src/perf/java/rx/usecases/PerfBaseline.java
@@ -0,0 +1,36 @@
+package rx.usecases;
+
+import java.util.Iterator;
+
+import org.openjdk.jmh.annotations.GenerateMicroBenchmark;
+
+public class PerfBaseline {
+
+ @GenerateMicroBenchmark
+ public void forLoopConsumption(UseCaseInput input) throws InterruptedException {
+ for (int i = 0; i < input.size; i++) {
+ input.observer.onNext(i);
+ }
+ }
+
+ @GenerateMicroBenchmark
+ public void observableConsumption(UseCaseInput input) throws InterruptedException {
+ input.observable.subscribe(input.observer);
+ input.awaitCompletion();
+ }
+
+ @GenerateMicroBenchmark
+ public void iterableViaForLoopConsumption(UseCaseInput input) throws InterruptedException {
+ for (int i : input.iterable) {
+ input.observer.onNext(i);
+ }
+ }
+
+ @GenerateMicroBenchmark
+ public void iterableViaHasNextConsumption(UseCaseInput input) throws InterruptedException {
+ Iterator<Integer> iterator = input.iterable.iterator();
+ while (iterator.hasNext()) {
+ input.observer.onNext(iterator.next());
+ }
+ }
+}
diff --git a/rxjava-core/src/perf/java/rx/usecases/PerfObserveOn.java b/rxjava-core/src/perf/java/rx/usecases/PerfObserveOn.java
index 3b6aa1d2ea..6200cae9d4 100644
--- a/rxjava-core/src/perf/java/rx/usecases/PerfObserveOn.java
+++ b/rxjava-core/src/perf/java/rx/usecases/PerfObserveOn.java
@@ -22,9 +22,21 @@
public class PerfObserveOn {
@GenerateMicroBenchmark
- public void observeOn(UseCaseInput input) throws InterruptedException {
+ public void observeOnComputation(UseCaseInput input) throws InterruptedException {
input.observable.observeOn(Schedulers.computation()).subscribe(input.observer);
input.awaitCompletion();
}
+ @GenerateMicroBenchmark
+ public void observeOnNewThread(UseCaseInput input) throws InterruptedException {
+ input.observable.observeOn(Schedulers.newThread()).subscribe(input.observer);
+ input.awaitCompletion();
+ }
+
+ @GenerateMicroBenchmark
+ public void observeOnImmediate(UseCaseInput input) throws InterruptedException {
+ input.observable.observeOn(Schedulers.immediate()).subscribe(input.observer);
+ input.awaitCompletion();
+ }
+
}
diff --git a/rxjava-core/src/perf/java/rx/usecases/PerfTransforms.java b/rxjava-core/src/perf/java/rx/usecases/PerfTransforms.java
index 8b56734e5d..ee8150c3d0 100644
--- a/rxjava-core/src/perf/java/rx/usecases/PerfTransforms.java
+++ b/rxjava-core/src/perf/java/rx/usecases/PerfTransforms.java
@@ -19,12 +19,24 @@
import rx.Observable;
import rx.functions.Func1;
-import rx.schedulers.Schedulers;
public class PerfTransforms {
@GenerateMicroBenchmark
- public void mapTransformation(UseCaseInput input) throws InterruptedException {
+ public void mapPassThru(UseCaseInput input) throws InterruptedException {
+ input.observable.map(new Func1<Integer, Integer>() {
+
+ @Override
+ public Integer call(Integer i) {
+ return i;
+ }
+
+ }).subscribe(input.observer);
+ input.awaitCompletion();
+ }
+
+ @GenerateMicroBenchmark
+ public void mapIntStringInt(UseCaseInput input) throws InterruptedException {
input.observable.map(new Func1<Integer, String>() {
@Override
@@ -44,7 +56,7 @@ public Integer call(String i) {
}
@GenerateMicroBenchmark
- public void flatMapTransforms(UseCaseInput input) throws InterruptedException {
+ public void flatMapInt(UseCaseInput input) throws InterruptedException {
input.observable.flatMap(new Func1<Integer, Observable<Integer>>() {
@Override
diff --git a/rxjava-core/src/perf/java/rx/usecases/UseCaseInput.java b/rxjava-core/src/perf/java/rx/usecases/UseCaseInput.java
index c18bfcc0ed..b3b7958118 100644
--- a/rxjava-core/src/perf/java/rx/usecases/UseCaseInput.java
+++ b/rxjava-core/src/perf/java/rx/usecases/UseCaseInput.java
@@ -15,6 +15,7 @@
*/
package rx.usecases;
+import java.util.Iterator;
import java.util.concurrent.CountDownLatch;
import org.openjdk.jmh.annotations.Param;
@@ -36,6 +37,7 @@ public class UseCaseInput {
@Param({ "1", "1024" })
public int size;
+ public Iterable<Integer> iterable;
public Observable<Integer> observable;
public Observer<Integer> observer;
@@ -52,6 +54,34 @@ public void call(Subscriber<? super Integer> o) {
o.onCompleted();
}
});
+
+ iterable = new Iterable<Integer>() {
+
+ @Override
+ public Iterator<Integer> iterator() {
+ return new Iterator<Integer>() {
+
+ int i=0;
+
+ @Override
+ public boolean hasNext() {
+ return i < size;
+ }
+
+ @Override
+ public Integer next() {
+ return i++;
+ }
+
+ @Override
+ public void remove() {
+
+ }
+
+ };
+ }
+
+ };
latch = new CountDownLatch(1);
|
4a1d547c823016965040237c038832468984fd05
|
intellij-community
|
plugins: tooltip for plugins with newer version- (IDEA-75998)--
|
a
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/platform-impl/src/com/intellij/ide/plugins/InstalledPluginsTableModel.java b/platform/platform-impl/src/com/intellij/ide/plugins/InstalledPluginsTableModel.java
index 840b8e1ccc43d..00b7d1672b357 100644
--- a/platform/platform-impl/src/com/intellij/ide/plugins/InstalledPluginsTableModel.java
+++ b/platform/platform-impl/src/com/intellij/ide/plugins/InstalledPluginsTableModel.java
@@ -514,6 +514,7 @@ public Component getTableCellRendererComponent(JTable table, Object value, boole
else if (hasNewerVersion(myPluginDescriptor.getPluginId()) ||
PluginManagerUISettings.getInstance().myOutdatedPlugins.contains(idString)) {
myNameLabel.setIcon(IconLoader.getIcon("/nodes/pluginobsolete.png"));
+ myPanel.setToolTipText("Newer version of the plugin is available");
}
else {
myNameLabel.setIcon(IconLoader.getIcon("/nodes/plugin.png"));
|
ed41fede1fc5861998cd375ffee6980be106cb0c
|
orientdb
|
Released 0.9.8 with the fix on partitioning in- Key/Value Server--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/base/OrientMonoThreadTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/base/OrientMonoThreadTest.java
index b4e33bd210e..305f7124f2d 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/base/OrientMonoThreadTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/base/OrientMonoThreadTest.java
@@ -19,7 +19,6 @@
import com.orientechnologies.common.profiler.OProfiler;
import com.orientechnologies.common.test.SpeedTestMonoThread;
-import com.orientechnologies.orient.client.distributed.OEngineDistributed;
import com.orientechnologies.orient.client.remote.OEngineRemote;
import com.orientechnologies.orient.core.Orient;
@@ -28,7 +27,6 @@ public abstract class OrientMonoThreadTest extends SpeedTestMonoThread {
public OrientMonoThreadTest(int iCycles) {
super(iCycles);
Orient.instance().registerEngine(new OEngineRemote());
- Orient.instance().registerEngine(new OEngineDistributed());
}
public void deinit() {
|
67e2a30687c80776e00b36c94b79589be95a6d06
|
drools
|
[DROOLS-601] fix queries when used in combination- with agenda-groups--
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/drools/compiler/rule/builder/PatternBuilder.java b/drools-compiler/src/main/java/org/drools/compiler/rule/builder/PatternBuilder.java
index 353bf79b9ea..25d2763e52d 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/rule/builder/PatternBuilder.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/rule/builder/PatternBuilder.java
@@ -198,11 +198,7 @@ public RuleConditionElement build( RuleBuildContext context,
// it might be a recursive query, so check for same names
if ( context.getRule().getName().equals( patternDescr.getObjectType() ) ) {
// it's a query so delegate to the QueryElementBuilder
- QueryElementBuilder qeBuilder = QueryElementBuilder.getInstance();
- rce = qeBuilder.build( context,
- descr,
- prefixPattern,
- (QueryImpl) context.getRule() );
+ rce = buildQueryElement(context, descr, prefixPattern, (QueryImpl) context.getRule());
}
if ( rce == null ) {
@@ -210,11 +206,7 @@ public RuleConditionElement build( RuleBuildContext context,
RuleImpl rule = context.getPkg().getRule( patternDescr.getObjectType() );
if ( rule instanceof QueryImpl ) {
// it's a query so delegate to the QueryElementBuilder
- QueryElementBuilder qeBuilder = QueryElementBuilder.getInstance();
- rce = qeBuilder.build( context,
- descr,
- prefixPattern,
- (QueryImpl) rule );
+ rce = buildQueryElement(context, descr, prefixPattern, (QueryImpl) rule);
}
}
@@ -231,11 +223,7 @@ public RuleConditionElement build( RuleBuildContext context,
RuleImpl rule = pkgReg.getPackage().getRule( patternDescr.getObjectType() );
if ( rule instanceof QueryImpl) {
// it's a query so delegate to the QueryElementBuilder
- QueryElementBuilder qeBuilder = QueryElementBuilder.getInstance();
- rce = qeBuilder.build( context,
- descr,
- prefixPattern,
- (QueryImpl) rule );
+ rce = buildQueryElement(context, descr, prefixPattern, (QueryImpl) rule);
break;
}
}
@@ -354,6 +342,13 @@ public RuleConditionElement build( RuleBuildContext context,
return pattern;
}
+ private RuleConditionElement buildQueryElement(RuleBuildContext context, BaseDescr descr, Pattern prefixPattern, QueryImpl rule) {
+ if (context.getRule() != rule) {
+ context.getRule().addUsedQuery(rule);
+ }
+ return QueryElementBuilder.getInstance().build( context, descr, prefixPattern, rule );
+ }
+
protected void processDuplicateBindings( boolean isUnification,
PatternDescr patternDescr,
Pattern pattern,
diff --git a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/Misc2Test.java b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/Misc2Test.java
index 0857a2778ec..d0ca3a515b9 100644
--- a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/Misc2Test.java
+++ b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/Misc2Test.java
@@ -82,6 +82,7 @@
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.StatelessKieSession;
+import org.kie.api.runtime.rule.Agenda;
import org.kie.api.runtime.rule.FactHandle;
import org.kie.internal.KnowledgeBase;
import org.kie.internal.KnowledgeBaseFactory;
@@ -6652,4 +6653,134 @@ public void testGenericsInRHSWithModify() {
ksession.insert("1");
ksession.fireAllRules();
}
+
+ @Test
+ public void testQueryWithAgendaGroup() {
+ // DROOLS-601
+ String drl =
+ "package org.drools.test; " +
+ "global java.util.List list; " +
+
+ "query foo( Integer $i ) " +
+ " $i := Integer() " +
+ "end " +
+
+ "rule Detect " +
+ "agenda-group 'one' " +
+ "when " +
+ " foo( $i ; ) " +
+ "then " +
+ " list.add( $i ); " +
+ "end " +
+
+ "rule OnceMore " +
+ "agenda-group 'two' " +
+ "no-loop " +
+ "when " +
+ " $i : Integer() " +
+ "then " +
+ " update( $i );" +
+ "end " +
+ "";
+
+ KieHelper helper = new KieHelper();
+ helper.addContent( drl, ResourceType.DRL );
+ KieSession kieSession = helper.build().newKieSession();
+
+ List<Integer> list = new ArrayList<Integer>();
+ kieSession.setGlobal( "list", list );
+
+ FactHandle handle = kieSession.insert( 42 );
+
+ Agenda agenda = kieSession.getAgenda();
+ agenda.getAgendaGroup("two").setFocus();
+ agenda.getAgendaGroup("one").setFocus();
+
+ kieSession.fireAllRules();
+ assertEquals( Arrays.asList( 42 ), list );
+
+ kieSession.delete( handle );
+
+ kieSession.insert( 99 );
+
+ agenda.getAgendaGroup("two").setFocus();
+ agenda.getAgendaGroup("one").setFocus();
+
+ kieSession.fireAllRules();
+ assertEquals( Arrays.asList( 42, 99 ), list );
+ }
+
+ @Test
+ public void testQueryUsingQueryWithAgendaGroup() {
+ // DROOLS-601
+ String drl =
+ "package org.drools.test; " +
+ "global java.util.List list; " +
+
+ "query bar( String $s ) " +
+ " $s := String() " +
+ "end " +
+ "query foo( Integer $i, String $s ) " +
+ " bar( $s ; ) " +
+ " $i := Integer( toString() == $s ) " +
+ "end " +
+
+ "rule Detect " +
+ "agenda-group 'one' " +
+ "when " +
+ " foo( $i, $s ; ) " +
+ "then " +
+ " list.add( $i ); " +
+ "end " +
+
+ "rule UpdateInt " +
+ "agenda-group 'two' " +
+ "no-loop " +
+ "when " +
+ " $i : Integer() " +
+ "then " +
+ " update( $i );" +
+ "end " +
+
+ "rule UpdateString " +
+ "agenda-group 'three' " +
+ "no-loop " +
+ "when " +
+ " $s : String() " +
+ "then " +
+ " update( $s );" +
+ "end " +
+ "";
+
+ KieHelper helper = new KieHelper();
+ helper.addContent( drl, ResourceType.DRL );
+ KieSession kieSession = helper.build().newKieSession();
+
+ List<Integer> list = new ArrayList<Integer>();
+ kieSession.setGlobal( "list", list );
+
+ FactHandle iFH = kieSession.insert( 42 );
+ FactHandle sFH = kieSession.insert( "42" );
+
+ Agenda agenda = kieSession.getAgenda();
+ agenda.getAgendaGroup("three").setFocus();
+ agenda.getAgendaGroup("two").setFocus();
+ agenda.getAgendaGroup("one").setFocus();
+
+ kieSession.fireAllRules();
+ assertEquals( Arrays.asList( 42 ), list );
+
+ //kieSession.delete( iFH );
+ kieSession.delete( sFH );
+
+ kieSession.insert( 99 );
+ kieSession.insert( "99" );
+
+ agenda.getAgendaGroup("three").setFocus();
+ agenda.getAgendaGroup("two").setFocus();
+ agenda.getAgendaGroup("one").setFocus();
+
+ kieSession.fireAllRules();
+ assertEquals( Arrays.asList( 42, 99 ), list );
+ }
}
\ No newline at end of file
diff --git a/drools-compiler/src/test/java/org/drools/compiler/rule/builder/dialect/mvel/MVELConsequenceBuilderTest.java b/drools-compiler/src/test/java/org/drools/compiler/rule/builder/dialect/mvel/MVELConsequenceBuilderTest.java
index e05a6002688..da2117408ad 100644
--- a/drools-compiler/src/test/java/org/drools/compiler/rule/builder/dialect/mvel/MVELConsequenceBuilderTest.java
+++ b/drools-compiler/src/test/java/org/drools/compiler/rule/builder/dialect/mvel/MVELConsequenceBuilderTest.java
@@ -109,7 +109,7 @@ public void testSimpleExpression() throws Exception {
final AgendaItem item = new AgendaItemImpl( 0, tuple, 10,
pctxFactory.createPropagationContext(1, 1, null, tuple, null),
- new RuleTerminalNode(0, new CompositeObjectSinkAdapterTest.MockBetaNode(), context.getRule(), subrule, 0, new BuildContext( kBase, null )), null, null);
+ new RuleTerminalNode(0, new CompositeObjectSinkAdapterTest.MockBetaNode(), context.getRule(), subrule, 0, new BuildContext( kBase, null )), null);
final DefaultKnowledgeHelper kbHelper = new DefaultKnowledgeHelper( ksession );
kbHelper.setActivation( item );
((MVELConsequence) context.getRule().getConsequence()).compile( (MVELDialectRuntimeData) pkgBuilder.getPackageRegistry( pkg.getName() ).getDialectRuntimeRegistry().getDialectData( "mvel" ));
@@ -177,7 +177,6 @@ public void testImperativeCodeError() throws Exception {
final AgendaItem item = new AgendaItemImpl( 0,
tuple,
10,
- null,
null, null, null);
final DefaultKnowledgeHelper kbHelper = new DefaultKnowledgeHelper( ksession );
kbHelper.setActivation( item );
diff --git a/drools-compiler/src/test/java/org/drools/compiler/rule/builder/dialect/mvel/MVELSalienceBuilderTest.java b/drools-compiler/src/test/java/org/drools/compiler/rule/builder/dialect/mvel/MVELSalienceBuilderTest.java
index 886eefa3317..b06765483f6 100644
--- a/drools-compiler/src/test/java/org/drools/compiler/rule/builder/dialect/mvel/MVELSalienceBuilderTest.java
+++ b/drools-compiler/src/test/java/org/drools/compiler/rule/builder/dialect/mvel/MVELSalienceBuilderTest.java
@@ -1,42 +1,40 @@
package org.drools.compiler.rule.builder.dialect.mvel;
-import java.util.HashMap;
-import java.util.Map;
-
+import org.drools.compiler.Person;
import org.drools.compiler.builder.impl.KnowledgeBuilderImpl;
import org.drools.compiler.compiler.DialectCompiletimeRegistry;
+import org.drools.compiler.lang.descr.AttributeDescr;
+import org.drools.compiler.lang.descr.RuleDescr;
+import org.drools.compiler.rule.builder.SalienceBuilder;
import org.drools.core.WorkingMemory;
-import org.drools.core.common.AgendaItemImpl;
-import org.drools.core.common.InternalWorkingMemory;
-import org.drools.core.definitions.InternalKnowledgePackage;
-import org.drools.core.definitions.impl.KnowledgePackageImpl;
-import org.drools.core.impl.InternalKnowledgeBase;
-import org.drools.core.impl.StatefulKnowledgeSessionImpl;
-import org.junit.Before;
-import org.junit.Test;
-import org.kie.api.definition.rule.Rule;
-
-import static org.junit.Assert.*;
-
-import org.drools.compiler.Person;
import org.drools.core.base.ClassObjectType;
import org.drools.core.base.DefaultKnowledgeHelper;
import org.drools.core.base.mvel.MVELSalienceExpression;
import org.drools.core.common.AgendaItem;
+import org.drools.core.common.AgendaItemImpl;
import org.drools.core.common.InternalFactHandle;
-import org.drools.compiler.lang.descr.AttributeDescr;
-import org.drools.compiler.lang.descr.RuleDescr;
+import org.drools.core.definitions.InternalKnowledgePackage;
+import org.drools.core.definitions.impl.KnowledgePackageImpl;
+import org.drools.core.impl.InternalKnowledgeBase;
+import org.drools.core.impl.StatefulKnowledgeSessionImpl;
import org.drools.core.reteoo.LeftTupleImpl;
import org.drools.core.reteoo.RuleTerminalNode;
import org.drools.core.rule.Declaration;
import org.drools.core.rule.MVELDialectRuntimeData;
import org.drools.core.rule.Pattern;
-import org.drools.compiler.rule.builder.SalienceBuilder;
import org.drools.core.spi.ObjectType;
import org.drools.core.spi.PatternExtractor;
import org.drools.core.spi.Salience;
+import org.junit.Before;
+import org.junit.Test;
+import org.kie.api.definition.rule.Rule;
import org.kie.internal.KnowledgeBaseFactory;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+
public class MVELSalienceBuilderTest {
private InstrumentedBuildContent context;
private InternalKnowledgeBase kBase ;
@@ -100,7 +98,7 @@ public void testSimpleExpression() {
RuleTerminalNode rtn = new RuleTerminalNode();
rtn.setSalienceDeclarations( context.getDeclarationResolver().getDeclarations( context.getRule() ).values().toArray( new Declaration[1] ) );
- AgendaItem item = new AgendaItemImpl(0, tuple, 0, null, rtn, null, null);
+ AgendaItem item = new AgendaItemImpl(0, tuple, 0, null, rtn, null);
assertEquals( 25,
@@ -182,7 +180,7 @@ public SalienceEvaluator(InternalKnowledgeBase kBase,
RuleTerminalNode rtn = new RuleTerminalNode();
rtn.setSalienceDeclarations( context.getDeclarationResolver().getDeclarations( context.getRule() ).values().toArray( new Declaration[1] ) );
- item = new AgendaItemImpl(0, tuple, 0, null, rtn, null, null);
+ item = new AgendaItemImpl(0, tuple, 0, null, rtn, null);
}
public void run() {
diff --git a/drools-core/src/main/java/org/drools/core/common/AgendaItemImpl.java b/drools-core/src/main/java/org/drools/core/common/AgendaItemImpl.java
index f23164d134b..5f3d6bbbcdf 100644
--- a/drools-core/src/main/java/org/drools/core/common/AgendaItemImpl.java
+++ b/drools-core/src/main/java/org/drools/core/common/AgendaItemImpl.java
@@ -16,11 +16,6 @@
package org.drools.core.common;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-import org.kie.api.runtime.rule.FactHandle;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.phreak.RuleAgendaItem;
import org.drools.core.reteoo.LeftTuple;
@@ -32,8 +27,13 @@
import org.drools.core.spi.PropagationContext;
import org.drools.core.util.LinkedList;
import org.drools.core.util.LinkedListEntry;
+import org.kie.api.runtime.rule.FactHandle;
import org.kie.internal.event.rule.ActivationUnMatchListener;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
/**
* Item entry in the <code>Agenda</code>.
*/
@@ -78,7 +78,6 @@ public class AgendaItemImpl
private boolean matched;
private boolean active;
private ActivationUnMatchListener activationUnMatchListener;
- private RuleAgendaItem ruleAgendaItem;
// ------------------------------------------------------------
// Constructors
@@ -92,7 +91,6 @@ public AgendaItemImpl() {
* Construct.
*
* @param tuple The tuple.
- * @param ruleAgendaItem
* @param agendaGroup
*/
public AgendaItemImpl(final long activationNumber,
@@ -100,7 +98,6 @@ public AgendaItemImpl(final long activationNumber,
final int salience,
final PropagationContext context,
final TerminalNode rtn,
- final RuleAgendaItem ruleAgendaItem,
final InternalAgendaGroup agendaGroup) {
this.tuple = tuple;
this.context = context;
@@ -109,7 +106,6 @@ public AgendaItemImpl(final long activationNumber,
this.activationNumber = activationNumber;
this.index = -1;
this.matched = true;
- this.ruleAgendaItem = ruleAgendaItem;
this.agendaGroup = agendaGroup;
}
@@ -171,7 +167,7 @@ public void setFactHandle(InternalFactHandle factHandle) {
@Override
public RuleAgendaItem getRuleAgendaItem() {
- return ruleAgendaItem;
+ return null;
}
/*
@@ -221,7 +217,7 @@ public void removeAllBlockersAndBlocked(InternalAgenda agenda) {
removeBlocked(dep);
AgendaItem justified = (AgendaItem) dep.getJustified();
if (justified.getBlockers().isEmpty()) {
- agenda.stageLeftTuple(ruleAgendaItem,justified);
+ agenda.stageLeftTuple(null,justified);
}
dep = tmp;
}
diff --git a/drools-core/src/main/java/org/drools/core/common/DefaultAgenda.java b/drools-core/src/main/java/org/drools/core/common/DefaultAgenda.java
index 73695080370..5fee77d0794 100644
--- a/drools-core/src/main/java/org/drools/core/common/DefaultAgenda.java
+++ b/drools-core/src/main/java/org/drools/core/common/DefaultAgenda.java
@@ -18,6 +18,7 @@
import org.drools.core.RuleBaseConfiguration;
import org.drools.core.WorkingMemory;
+import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.impl.InternalKnowledgeBase;
import org.drools.core.impl.StatefulKnowledgeSessionImpl;
import org.drools.core.phreak.RuleAgendaItem;
@@ -30,6 +31,7 @@
import org.drools.core.reteoo.TerminalNode;
import org.drools.core.rule.Declaration;
import org.drools.core.rule.EntryPointId;
+import org.drools.core.rule.QueryImpl;
import org.drools.core.spi.Activation;
import org.drools.core.spi.AgendaGroup;
import org.drools.core.spi.ConsequenceException;
@@ -61,7 +63,8 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
/**
@@ -102,7 +105,9 @@ public class DefaultAgenda
private InternalAgendaGroup main;
- private final LinkedList<RuleAgendaItem> eager = new LinkedList<RuleAgendaItem>();
+ private final org.drools.core.util.LinkedList<RuleAgendaItem> eager = new org.drools.core.util.LinkedList<RuleAgendaItem>();
+
+ private final ConcurrentMap<QueryImpl, RuleAgendaItem> queries = new ConcurrentHashMap<QueryImpl, RuleAgendaItem>();
private AgendaGroupFactory agendaGroupFactory;
@@ -265,7 +270,7 @@ public WorkingMemory getWorkingMemory() {
}
@Override
- public void addEagerRuleAgendaItem(final RuleAgendaItem item) {
+ public void addEagerRuleAgendaItem(RuleAgendaItem item) {
if ( workingMemory.getSessionConfiguration().getForceEagerActivationFilter().accept(item.getRule()) ) {
item.getRuleExecutor().evaluateNetwork(workingMemory);
return;
@@ -298,6 +303,22 @@ public void removeEagerRuleAgendaItem(RuleAgendaItem item) {
}
}
+ @Override
+ public void addQueryAgendaItem(RuleAgendaItem item) {
+ queries.putIfAbsent( (QueryImpl) item.getRule(), item );
+ if ( log.isTraceEnabled() ) {
+ log.trace("Added {} to query evaluation list.", item.getRule().getName() );
+ }
+ }
+
+ @Override
+ public void removeQueryAgendaItem(RuleAgendaItem item) {
+ queries.remove( (QueryImpl) item.getRule() );
+ if ( log.isTraceEnabled() ) {
+ log.trace("Removed {} from query evaluation list.", item.getRule().getName() );
+ }
+ }
+
public void scheduleItem(final ScheduledAgendaItem item,
final InternalWorkingMemory wm) {
throw new UnsupportedOperationException("rete only");
@@ -961,6 +982,7 @@ public int fireNextItem(final AgendaFilter filter,
garbageCollector.remove(item);
}
+ evaluateQueriesForRule(item);
localFireCount = item.getRuleExecutor().evaluateNetworkAndFire(this.workingMemory, filter,
fireCount, fireLimit);
if ( localFireCount == 0 ) {
@@ -985,13 +1007,29 @@ public int fireNextItem(final AgendaFilter filter,
public void evaluateEagerList() {
synchronized (eager) {
while ( !eager.isEmpty() ) {
- RuleExecutor ruleExecutor = eager.removeFirst().getRuleExecutor();
+ RuleAgendaItem item = eager.removeFirst();
+ evaluateQueriesForRule(item);
+ RuleExecutor ruleExecutor = item.getRuleExecutor();
ruleExecutor.flushTupleQueue(ruleExecutor.getPathMemory().getStreamQueue());
ruleExecutor.evaluateNetwork(this.workingMemory);
}
}
}
+ private void evaluateQueriesForRule(RuleAgendaItem item) {
+ RuleImpl rule = item.getRule();
+ if (!rule.isQuery()) {
+ for (QueryImpl query : rule.getDependingQueries()) {
+ RuleAgendaItem queryAgendaItem = queries.remove(query);
+ if (queryAgendaItem != null) {
+ RuleExecutor ruleExecutor = queryAgendaItem.getRuleExecutor();
+ ruleExecutor.flushTupleQueue(ruleExecutor.getPathMemory().getStreamQueue());
+ ruleExecutor.evaluateNetwork(this.workingMemory);
+ }
+ }
+ }
+ }
+
public int sizeOfRuleFlowGroup(String name) {
InternalAgendaGroup group = agendaGroups.get(name);
if (group == null) {
@@ -1145,7 +1183,7 @@ private boolean checkProcessInstance(Activation activation,
long processInstanceId) {
final Map<String, Declaration> declarations = activation.getSubRule().getOuterDeclarations();
for ( Declaration declaration : declarations.values() ) {
- if ( "processInstance".equals( declaration.getIdentifier() ) ) {
+ if ( "processInstance".equals(declaration.getIdentifier()) ) {
Object value = declaration.getValue( workingMemory,
activation.getTuple().get( declaration ).getObject() );
if ( value instanceof ProcessInstance ) {
@@ -1176,7 +1214,7 @@ public void stageLeftTuple(RuleAgendaItem ruleAgendaItem, AgendaItem justified)
}
public void fireUntilHalt() {
- fireUntilHalt( null );
+ fireUntilHalt(null);
}
public void fireUntilHalt(final AgendaFilter agendaFilter) {
diff --git a/drools-core/src/main/java/org/drools/core/common/InternalAgenda.java b/drools-core/src/main/java/org/drools/core/common/InternalAgenda.java
index 50e33e85cf1..5bbc459b16b 100644
--- a/drools-core/src/main/java/org/drools/core/common/InternalAgenda.java
+++ b/drools-core/src/main/java/org/drools/core/common/InternalAgenda.java
@@ -289,9 +289,11 @@ RuleAgendaItem createRuleAgendaItem(final int salience,
void addAgendaItemToGroup(AgendaItem item);
void addEagerRuleAgendaItem(RuleAgendaItem item);
-
void removeEagerRuleAgendaItem(RuleAgendaItem item);
+ void addQueryAgendaItem(final RuleAgendaItem item);
+ void removeQueryAgendaItem(final RuleAgendaItem item);
+
long getNextActivationCounter();
/*
diff --git a/drools-core/src/main/java/org/drools/core/definitions/rule/impl/RuleImpl.java b/drools-core/src/main/java/org/drools/core/definitions/rule/impl/RuleImpl.java
index bb0240e0c97..f157250f6ec 100644
--- a/drools-core/src/main/java/org/drools/core/definitions/rule/impl/RuleImpl.java
+++ b/drools-core/src/main/java/org/drools/core/definitions/rule/impl/RuleImpl.java
@@ -28,6 +28,7 @@
import org.drools.core.rule.InvalidPatternException;
import org.drools.core.rule.LogicTransformer;
import org.drools.core.rule.Pattern;
+import org.drools.core.rule.QueryImpl;
import org.drools.core.rule.RuleConditionElement;
import org.drools.core.spi.AgendaGroup;
import org.drools.core.spi.CompiledInvoker;
@@ -53,10 +54,13 @@
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
import java.util.Map;
public class RuleImpl implements Externalizable,
@@ -136,6 +140,10 @@ public class RuleImpl implements Externalizable,
private ConsequenceMetaData consequenceMetaData = new ConsequenceMetaData();
+ private List<QueryImpl> usedQueries;
+
+ private List<QueryImpl> dependingQueries;
+
public RuleImpl() {
}
@@ -159,7 +167,6 @@ public RuleImpl(final String name,
this.salience = SalienceInteger.DEFAULT_SALIENCE;
this.metaAttributes = new HashMap<String, Object>();
setActivationListener( "agenda" );
-
}
/**
@@ -219,6 +226,7 @@ public void writeExternal(ObjectOutput out) throws IOException {
out.writeObject( activationListener );
out.writeObject( consequenceMetaData );
out.writeBoolean( eager );
+ out.writeObject( usedQueries );
}
@SuppressWarnings("unchecked")
@@ -256,6 +264,39 @@ public void readExternal(ObjectInput in) throws IOException,
activationListener = ( String ) in.readObject();
consequenceMetaData = ( ConsequenceMetaData ) in.readObject();
eager = in.readBoolean();
+ usedQueries = (List<QueryImpl>) in.readObject();
+ }
+
+ public void addUsedQuery(QueryImpl query) {
+ if (usedQueries == null) {
+ usedQueries = new ArrayList<QueryImpl>();
+ }
+ usedQueries.add(query);
+ }
+
+ /**
+ * Returns the lists of queries from which this rule (or query) depends on ordered
+ * by their relative dependencies, e.g. if R1 -> A -> B -> C (where the letter are queries)
+ * it will return [C, B, A]
+ */
+ public List<QueryImpl> getDependingQueries() {
+ if (dependingQueries == null) {
+ dependingQueries = usedQueries == null ? Collections.<QueryImpl>emptyList() : collectDependingQueries(new LinkedList<QueryImpl>());
+ }
+ return dependingQueries;
+ }
+
+ protected List<QueryImpl> collectDependingQueries(LinkedList<QueryImpl> accumulator) {
+ if (usedQueries == null) {
+ return accumulator;
+ }
+ for (QueryImpl query : usedQueries) {
+ if (!accumulator.contains(query)) {
+ accumulator.offerFirst(query);
+ query.collectDependingQueries(accumulator);
+ }
+ }
+ return accumulator;
}
public Resource getResource() {
diff --git a/drools-core/src/main/java/org/drools/core/phreak/RuleAgendaItem.java b/drools-core/src/main/java/org/drools/core/phreak/RuleAgendaItem.java
index 581cde7042c..73181307928 100644
--- a/drools-core/src/main/java/org/drools/core/phreak/RuleAgendaItem.java
+++ b/drools-core/src/main/java/org/drools/core/phreak/RuleAgendaItem.java
@@ -30,7 +30,7 @@ public RuleAgendaItem(final long activationNumber,
final TerminalNode rtn,
boolean declarativeAgendaEnabled,
InternalAgendaGroup agendaGroup) {
- super(activationNumber, tuple, salience, context, rtn, null, agendaGroup);
+ super(activationNumber, tuple, salience, context, rtn, agendaGroup);
executor = new RuleExecutor(pmem, this, declarativeAgendaEnabled);
}
diff --git a/drools-core/src/main/java/org/drools/core/phreak/RuleExecutor.java b/drools-core/src/main/java/org/drools/core/phreak/RuleExecutor.java
index 6f8143b94ce..7e5f266e6cb 100644
--- a/drools-core/src/main/java/org/drools/core/phreak/RuleExecutor.java
+++ b/drools-core/src/main/java/org/drools/core/phreak/RuleExecutor.java
@@ -206,7 +206,9 @@ public void removeRuleAgendaItemWhenEmpty(InternalWorkingMemory wm) {
log.trace("Removing RuleAgendaItem " + ruleAgendaItem);
}
ruleAgendaItem.remove();
- if (ruleAgendaItem.getRule().isEager()) {
+ if ( ruleAgendaItem.getRule().isQuery() ) {
+ ((InternalAgenda)wm.getAgenda()).removeQueryAgendaItem( ruleAgendaItem );
+ } else if ( ruleAgendaItem.getRule().isEager() ) {
((InternalAgenda) wm.getAgenda()).removeEagerRuleAgendaItem(ruleAgendaItem);
}
}
diff --git a/drools-core/src/main/java/org/drools/core/reteoo/PathMemory.java b/drools-core/src/main/java/org/drools/core/reteoo/PathMemory.java
index 698696e3e6f..524defd1d85 100644
--- a/drools-core/src/main/java/org/drools/core/reteoo/PathMemory.java
+++ b/drools-core/src/main/java/org/drools/core/reteoo/PathMemory.java
@@ -140,7 +140,7 @@ public void queueRuleAgendaItem(InternalWorkingMemory wm) {
return;
}
- if (!agendaItem.isQueued() && !agendaItem.isBlocked()) {
+ if ( !agendaItem.isQueued() && !agendaItem.isBlocked() ) {
if ( isLogTraceEnabled ) {
log.trace("Queue RuleAgendaItem {}", agendaItem);
}
@@ -148,10 +148,13 @@ public void queueRuleAgendaItem(InternalWorkingMemory wm) {
ag.add( agendaItem );
}
}
- if ( agendaItem.getRule().isEager() ) {
- // will return if already added
+
+ if ( agendaItem.getRule().isQuery() ) {
+ ((InternalAgenda)wm.getAgenda()).addQueryAgendaItem( agendaItem );
+ } else if ( agendaItem.getRule().isEager() ) {
((InternalAgenda)wm.getAgenda()).addEagerRuleAgendaItem( agendaItem );
}
+
agenda.notifyHalt();
}
diff --git a/drools-reteoo/src/main/java/org/drools/reteoo/common/ReteAgenda.java b/drools-reteoo/src/main/java/org/drools/reteoo/common/ReteAgenda.java
index 23e9b288661..433f7a6f754 100644
--- a/drools-reteoo/src/main/java/org/drools/reteoo/common/ReteAgenda.java
+++ b/drools-reteoo/src/main/java/org/drools/reteoo/common/ReteAgenda.java
@@ -324,6 +324,16 @@ public void removeEagerRuleAgendaItem(RuleAgendaItem item) {
eager.remove(item);
}
+ @Override
+ public void addQueryAgendaItem(final RuleAgendaItem item) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void removeQueryAgendaItem(RuleAgendaItem item) {
+ throw new UnsupportedOperationException();
+ }
+
/**
* Schedule an agenda item for delayed firing.
*
|
8ee465103850a3dca018273fe5952e40d5c45a66
|
spring-framework
|
Improve StringUtils.cleanPath--Issue: SPR-11793-
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-core/src/main/java/org/springframework/util/StringUtils.java b/spring-core/src/main/java/org/springframework/util/StringUtils.java
index 126cab69fcd3..b659486a19d0 100644
--- a/spring-core/src/main/java/org/springframework/util/StringUtils.java
+++ b/spring-core/src/main/java/org/springframework/util/StringUtils.java
@@ -622,7 +622,12 @@ public static String cleanPath(String path) {
String prefix = "";
if (prefixIndex != -1) {
prefix = pathToUse.substring(0, prefixIndex + 1);
- pathToUse = pathToUse.substring(prefixIndex + 1);
+ if (prefix.contains("/")) {
+ prefix = "";
+ }
+ else {
+ pathToUse = pathToUse.substring(prefixIndex + 1);
+ }
}
if (pathToUse.startsWith(FOLDER_SEPARATOR)) {
prefix = prefix + FOLDER_SEPARATOR;
diff --git a/spring-core/src/test/java/org/springframework/util/StringUtilsTests.java b/spring-core/src/test/java/org/springframework/util/StringUtilsTests.java
index b366ed7f96d0..c362a92ed529 100644
--- a/spring-core/src/test/java/org/springframework/util/StringUtilsTests.java
+++ b/spring-core/src/test/java/org/springframework/util/StringUtilsTests.java
@@ -299,6 +299,8 @@ public void testCleanPath() {
assertEquals("../mypath/myfile", StringUtils.cleanPath("../mypath/../mypath/myfile"));
assertEquals("../mypath/myfile", StringUtils.cleanPath("mypath/../../mypath/myfile"));
assertEquals("/../mypath/myfile", StringUtils.cleanPath("/../mypath/myfile"));
+ assertEquals("/mypath/myfile", StringUtils.cleanPath("/a/:b/../../mypath/myfile"));
+ assertEquals("file:///c:/path/to/the%20file.txt", StringUtils.cleanPath("file:///c:/some/../path/to/the%20file.txt"));
}
public void testPathEquals() {
|
3f04248a8a5af6d3e0e3cb7a5837764a52ecaf63
|
drools
|
more test coverage for nesting of OR CEs--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@4586 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
p
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/lang/RuleParserTest.java b/drools-compiler/src/test/java/org/drools/lang/RuleParserTest.java
index 0d0b7d33188..3bc68a8e273 100644
--- a/drools-compiler/src/test/java/org/drools/lang/RuleParserTest.java
+++ b/drools-compiler/src/test/java/org/drools/lang/RuleParserTest.java
@@ -1056,8 +1056,8 @@ public void testBoundVariables() throws Exception {
assertFalse( parser.hasErrors() );
}
- public void FIXME_testOrNesting() throws Exception {
- final RuleParser parser = parseResource( "and_or_rule.drl" );
+ public void testOrNesting() throws Exception {
+ final RuleParser parser = parseResource( "or_nesting.drl" );
parser.compilation_unit();
final PackageDescr pack = parser.getPackageDescr();
@@ -1082,7 +1082,7 @@ public void FIXME_testOrNesting() throws Exception {
ColumnDescr left = (ColumnDescr) and.getDescrs().get( 0 );
assertEquals("Person", left.getObjectType());
- ColumnDescr right = (ColumnDescr) and.getDescrs().get( 0 );
+ ColumnDescr right = (ColumnDescr) and.getDescrs().get( 1 );
assertEquals("Cheese", right.getObjectType());
}
diff --git a/drools-compiler/src/test/resources/org/drools/lang/or_nesting.drl b/drools-compiler/src/test/resources/org/drools/lang/or_nesting.drl
new file mode 100644
index 00000000000..b55eca6729b
--- /dev/null
+++ b/drools-compiler/src/test/resources/org/drools/lang/or_nesting.drl
@@ -0,0 +1,10 @@
+
+import org.drools.Person
+
+rule simple_rule
+ when
+ Person(name == "mark") or
+ ( Person(type == "fan") and Cheese(type == "green") )
+ then
+ System.out.println( "Mark and Michael" + bar );
+end
\ No newline at end of file
|
62c048660bd3d4062e56646ae7c17b9a2ef15614
|
hbase
|
HBASE-10885 Support visibility expressions on- Deletes (Ram)--
|
a
|
https://github.com/apache/hbase
|
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java
index 301161c1c8ba..bc84207b5cd2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java
@@ -40,4 +40,13 @@ public final class VisibilityConstants {
/** Qualifier for the internal storage table for visibility labels */
public static final byte[] LABEL_QUALIFIER = new byte[1];
+ /**
+ * Visibility serialization version format. It indicates the visibility labels
+ * are sorted based on ordinal
+ **/
+ public static final byte VISIBILITY_SERIALIZATION_VERSION = 1;
+ /** Byte representation of the visibility_serialization_version **/
+ public static final byte[] SORTED_ORDINAL_SERIALIZATION_FORMAT = Bytes
+ .toBytes(VISIBILITY_SERIALIZATION_VERSION);
+
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 811de1060886..376e07347bf2 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -428,6 +428,14 @@ public static boolean isDeleteFamilyVersion(final Cell cell) {
return cell.getTypeByte() == Type.DeleteFamilyVersion.getCode();
}
+ public static boolean isDeleteColumns(final Cell cell) {
+ return cell.getTypeByte() == Type.DeleteColumn.getCode();
+ }
+
+ public static boolean isDeleteColumnVersion(final Cell cell) {
+ return cell.getTypeByte() == Type.Delete.getCode();
+ }
+
/**
*
* @return True if this cell is a delete family or column type.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java
index 6c43c7802670..68018c4f4cd5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagType.java
@@ -27,4 +27,5 @@ public final class TagType {
public static final byte ACL_TAG_TYPE = (byte) 1;
public static final byte VISIBILITY_TAG_TYPE = (byte) 2;
public static final byte LOG_REPLAY_TAG_TYPE = (byte) 3;
+ public static final byte VISIBILITY_EXP_SERIALIZATION_TAG_TYPE = (byte)4;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LabelExpander.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LabelExpander.java
index 0c133f6f6b6d..60a3bc3767e7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LabelExpander.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LabelExpander.java
@@ -25,6 +25,7 @@
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -38,10 +39,12 @@
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser.BadTsvLineException;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.security.visibility.ExpressionExpander;
import org.apache.hadoop.hbase.security.visibility.ExpressionParser;
+import org.apache.hadoop.hbase.security.visibility.InvalidLabelException;
import org.apache.hadoop.hbase.security.visibility.ParseException;
import org.apache.hadoop.hbase.security.visibility.VisibilityUtils;
import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode;
@@ -49,7 +52,6 @@
import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.Operator;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.WritableUtils;
/**
* An utility class that helps the mapper and reducers used with visibility to
@@ -71,32 +73,37 @@ public LabelExpander(Configuration conf) {
// TODO : The code repeats from that in Visibility Controller.. Refactoring
// may be needed
- public List<Tag> createVisibilityTags(String visibilityLabelsExp) throws IOException,
- BadTsvLineException {
+ private List<Tag> createVisibilityTags(String visibilityLabelsExp) throws IOException,
+ ParseException, InvalidLabelException {
ExpressionNode node = null;
- try {
- node = parser.parse(visibilityLabelsExp);
- } catch (ParseException e) {
- throw new BadTsvLineException(e.getMessage());
- }
+ node = parser.parse(visibilityLabelsExp);
node = expander.expand(node);
List<Tag> tags = new ArrayList<Tag>();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
+ List<Integer> labelOrdinals = new ArrayList<Integer>();
+ // We will be adding this tag before the visibility tags and the presence of
+ // this
+ // tag indicates we are supporting deletes with cell visibility
+ tags.add(VisibilityUtils.VIS_SERIALIZATION_TAG);
if (node.isSingleNode()) {
- writeLabelOrdinalsToStream(node, dos);
+ getLabelOrdinals(node, labelOrdinals);
+ writeLabelOrdinalsToStream(labelOrdinals, dos);
tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray()));
baos.reset();
} else {
NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node;
if (nlNode.getOperator() == Operator.OR) {
for (ExpressionNode child : nlNode.getChildExps()) {
- writeLabelOrdinalsToStream(child, dos);
+ getLabelOrdinals(child, labelOrdinals);
+ writeLabelOrdinalsToStream(labelOrdinals, dos);
tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray()));
baos.reset();
+ labelOrdinals.clear();
}
} else {
- writeLabelOrdinalsToStream(nlNode, dos);
+ getLabelOrdinals(nlNode, labelOrdinals);
+ writeLabelOrdinalsToStream(labelOrdinals, dos);
tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray()));
baos.reset();
}
@@ -104,34 +111,38 @@ public List<Tag> createVisibilityTags(String visibilityLabelsExp) throws IOExcep
return tags;
}
- private void writeLabelOrdinalsToStream(ExpressionNode node, DataOutputStream dos)
- throws IOException, BadTsvLineException {
+ private void writeLabelOrdinalsToStream(List<Integer> labelOrdinals, DataOutputStream dos)
+ throws IOException {
+ Collections.sort(labelOrdinals);
+ for (Integer labelOrdinal : labelOrdinals) {
+ StreamUtils.writeRawVInt32(dos, labelOrdinal);
+ }
+ }
+
+ private void getLabelOrdinals(ExpressionNode node, List<Integer> labelOrdinals)
+ throws IOException, InvalidLabelException {
if (node.isSingleNode()) {
String identifier = null;
int labelOrdinal = 0;
if (node instanceof LeafExpressionNode) {
identifier = ((LeafExpressionNode) node).getIdentifier();
- if (this.labels.get(identifier) != null) {
- labelOrdinal = this.labels.get(identifier);
- }
+ labelOrdinal = this.labels.get(identifier);
} else {
// This is a NOT node.
LeafExpressionNode lNode = (LeafExpressionNode) ((NonLeafExpressionNode) node)
.getChildExps().get(0);
identifier = lNode.getIdentifier();
- if (this.labels.get(identifier) != null) {
- labelOrdinal = this.labels.get(identifier);
- labelOrdinal = -1 * labelOrdinal; // Store NOT node as -ve ordinal.
- }
+ labelOrdinal = this.labels.get(identifier);
+ labelOrdinal = -1 * labelOrdinal; // Store NOT node as -ve ordinal.
}
if (labelOrdinal == 0) {
- throw new BadTsvLineException("Invalid visibility label " + identifier);
+ throw new InvalidLabelException("Invalid visibility label " + identifier);
}
- WritableUtils.writeVInt(dos, labelOrdinal);
+ labelOrdinals.add(labelOrdinal);
} else {
List<ExpressionNode> childExps = ((NonLeafExpressionNode) node).getChildExps();
for (ExpressionNode child : childExps) {
- writeLabelOrdinalsToStream(child, dos);
+ getLabelOrdinals(child, labelOrdinals);
}
}
}
@@ -190,6 +201,7 @@ private void createLabels() throws IOException {
* @return KeyValue from the cell visibility expr
* @throws IOException
* @throws BadTsvLineException
+ * @throws ParseException
*/
public KeyValue createKVFromCellVisibilityExpr(int rowKeyOffset, int rowKeyLength, byte[] family,
int familyOffset, int familyLength, byte[] qualifier, int qualifierOffset,
@@ -201,10 +213,14 @@ public KeyValue createKVFromCellVisibilityExpr(int rowKeyOffset, int rowKeyLengt
KeyValue kv = null;
if (cellVisibilityExpr != null) {
// Apply the expansion and parsing here
- List<Tag> visibilityTags = createVisibilityTags(cellVisibilityExpr);
- kv = new KeyValue(lineBytes, rowKeyOffset, rowKeyLength, family, familyOffset, familyLength,
- qualifier, qualifierOffset, qualifierLength, ts, KeyValue.Type.Put, lineBytes, columnOffset,
- columnLength, visibilityTags);
+ try {
+ List<Tag> visibilityTags = createVisibilityTags(cellVisibilityExpr);
+ kv = new KeyValue(lineBytes, rowKeyOffset, rowKeyLength, family, familyOffset,
+ familyLength, qualifier, qualifierOffset, qualifierLength, ts, KeyValue.Type.Put,
+ lineBytes, columnOffset, columnLength, visibilityTags);
+ } catch (ParseException e) {
+ throw new BadTsvLineException("Parse Exception " + e.getMessage());
+ }
} else {
kv = new KeyValue(lineBytes, rowKeyOffset, rowKeyLength, family, familyOffset, familyLength,
qualifier, qualifierOffset, qualifierLength, ts, KeyValue.Type.Put, lineBytes, columnOffset,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
index 514628d9732d..469d451e62bb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
@@ -45,7 +45,7 @@ public interface DeleteTracker {
/**
* Check if the specified cell buffer has been deleted by a previously
* seen delete.
- * @param cell - current cell to check if deleted by a previously deleted cell
+ * @param cell - current cell to check if deleted by a previously seen delete
* @return deleteResult The result tells whether the KeyValue is deleted and why
*/
DeleteResult isDeleted(Cell cell);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index c1389386b9ff..2429ed5c001f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -2079,8 +2079,8 @@ void prepareDeleteTimestamps(Mutation mutation, Map<byte[], List<Cell>> familyMa
get.setMaxVersions(count);
get.addColumn(family, qual);
if (coprocessorHost != null) {
- if (!coprocessorHost.prePrepareTimeStampForDeleteVersion(mutation, cell, byteNow,
- get)) {
+ if (!coprocessorHost.prePrepareTimeStampForDeleteVersion(mutation, cell,
+ byteNow, get)) {
updateDeleteLatestVersionTimeStamp(kv, get, count, byteNow);
}
} else {
@@ -4759,7 +4759,7 @@ public Result get(final Get get) throws IOException {
* @param withCoprocessor invoke coprocessor or not. We don't want to
* always invoke cp for this private method.
*/
- private List<Cell> get(Get get, boolean withCoprocessor)
+ public List<Cell> get(Get get, boolean withCoprocessor)
throws IOException {
List<Cell> results = new ArrayList<Cell>();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java
index 6a767e807406..668e3db1e72a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java
@@ -45,14 +45,14 @@
@InterfaceAudience.Private
public class ScanDeleteTracker implements DeleteTracker {
- private boolean hasFamilyStamp = false;
- private long familyStamp = 0L;
- private SortedSet<Long> familyVersionStamps = new TreeSet<Long>();
- private byte [] deleteBuffer = null;
- private int deleteOffset = 0;
- private int deleteLength = 0;
- private byte deleteType = 0;
- private long deleteTimestamp = 0L;
+ protected boolean hasFamilyStamp = false;
+ protected long familyStamp = 0L;
+ protected SortedSet<Long> familyVersionStamps = new TreeSet<Long>();
+ protected byte [] deleteBuffer = null;
+ protected int deleteOffset = 0;
+ protected int deleteLength = 0;
+ protected byte deleteType = 0;
+ protected long deleteTimestamp = 0L;
/**
* Constructor for ScanDeleteTracker
@@ -65,7 +65,7 @@ public ScanDeleteTracker() {
* Add the specified KeyValue to the list of deletes to check against for
* this row operation.
* <p>
- * This is called when a Delete is encountered in a StoreFile.
+ * This is called when a Delete is encountered.
* @param cell - the delete cell
*/
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
index fe13430bd8ac..4501c0be9d3d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java
@@ -30,6 +30,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.BitSet;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
@@ -75,6 +76,7 @@
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.util.StreamUtils;
@@ -93,6 +95,7 @@
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsService;
import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.regionserver.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
@@ -149,7 +152,6 @@ public class VisibilityController extends BaseRegionObserver implements MasterOb
private boolean acOn = false;
private Configuration conf;
private volatile boolean initialized = false;
-
/** Mapping of scanner instances to the user who created them */
private Map<InternalScanner,String> scannerOwners =
new MapMaker().weakKeys().makeMap();
@@ -167,6 +169,13 @@ public class VisibilityController extends BaseRegionObserver implements MasterOb
LABELS_TABLE_TAGS[0] = new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray());
}
+ // Add to this list if there are any reserved tag types
+ private static ArrayList<Byte> reservedVisTagTypes = new ArrayList<Byte>();
+ static {
+ reservedVisTagTypes.add(VisibilityUtils.VISIBILITY_TAG_TYPE);
+ reservedVisTagTypes.add(VisibilityUtils.VISIBILITY_EXP_SERIALIZATION_TAG_TYPE);
+ }
+
@Override
public void start(CoprocessorEnvironment env) throws IOException {
this.conf = env.getConfiguration();
@@ -690,10 +699,8 @@ public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
new OperationStatus(SANITY_CHECK_FAILURE, de.getMessage()));
continue;
}
- if (m instanceof Put) {
- Put p = (Put) m;
boolean sanityFailure = false;
- for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
+ for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
if (!checkForReservedVisibilityTagPresence(cellScanner.current())) {
miniBatchOp.setOperationStatus(i, new OperationStatus(SANITY_CHECK_FAILURE,
"Mutation contains cell with reserved type tag"));
@@ -707,7 +714,7 @@ public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
List<Tag> visibilityTags = labelCache.get(labelsExp);
if (visibilityTags == null) {
try {
- visibilityTags = createVisibilityTags(labelsExp);
+ visibilityTags = createVisibilityTags(labelsExp, true);
} catch (ParseException e) {
miniBatchOp.setOperationStatus(i,
new OperationStatus(SANITY_CHECK_FAILURE, e.getMessage()));
@@ -719,7 +726,7 @@ public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
if (visibilityTags != null) {
labelCache.put(labelsExp, visibilityTags);
List<Cell> updatedCells = new ArrayList<Cell>();
- for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
+ for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
Cell cell = cellScanner.current();
List<Tag> tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
@@ -732,22 +739,71 @@ public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
cell.getValueLength(), tags);
updatedCells.add(updatedCell);
}
- p.getFamilyCellMap().clear();
- // Clear and add new Cells to the Mutation.
- for (Cell cell : updatedCells) {
+ m.getFamilyCellMap().clear();
+ // Clear and add new Cells to the Mutation.
+ for (Cell cell : updatedCells) {
+ if (m instanceof Put) {
+ Put p = (Put) m;
p.add(cell);
+ } else if (m instanceof Delete) {
+ // TODO : Cells without visibility tags would be handled in follow up issue
+ Delete d = (Delete) m;
+ d.addDeleteMarker(cell);
}
}
}
}
- } else if (cellVisibility != null) {
- // CellVisibility in a Delete is not legal! Fail the operation
- miniBatchOp.setOperationStatus(i, new OperationStatus(SANITY_CHECK_FAILURE,
- "CellVisibility cannot be set on Delete mutation"));
}
}
}
+ @Override
+ public void prePrepareTimeStampForDeleteVersion(
+ ObserverContext<RegionCoprocessorEnvironment> ctx, Mutation delete, Cell cell,
+ byte[] byteNow, Get get) throws IOException {
+ KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
+ CellVisibility cellVisibility = null;
+ try {
+ cellVisibility = delete.getCellVisibility();
+ } catch (DeserializationException de) {
+ throw new IOException("Invalid cell visibility specified " + delete, de);
+ }
+    // The check for checkForReservedVisibilityTagPresence happens in preBatchMutate.
+ // It happens for every mutation and that would be enough.
+ List<Tag> visibilityTags = new ArrayList<Tag>();
+ if (cellVisibility != null) {
+ String labelsExp = cellVisibility.getExpression();
+ try {
+ visibilityTags = createVisibilityTags(labelsExp, false);
+ } catch (ParseException e) {
+ throw new IOException("Invalid cell visibility expression " + labelsExp, e);
+ } catch (InvalidLabelException e) {
+ throw new IOException("Invalid cell visibility specified " + labelsExp, e);
+ }
+ }
+ get.setFilter(new DeleteVersionVisibilityExpressionFilter(visibilityTags));
+ List<Cell> result = ctx.getEnvironment().getRegion().get(get, false);
+
+ if (result.size() < get.getMaxVersions()) {
+ // Nothing to delete
+ kv.updateLatestStamp(Bytes.toBytes(Long.MIN_VALUE));
+ return;
+ }
+ if (result.size() > get.getMaxVersions()) {
+ throw new RuntimeException("Unexpected size: " + result.size()
+ + ". Results more than the max versions obtained.");
+ }
+ KeyValue getkv = KeyValueUtil.ensureKeyValue(result.get(get.getMaxVersions() - 1));
+ Bytes.putBytes(kv.getBuffer(), kv.getTimestampOffset(), getkv.getBuffer(),
+ getkv.getTimestampOffset(), Bytes.SIZEOF_LONG);
+ // We are bypassing here because in the HRegion.updateDeleteLatestVersionTimeStamp we would
+    // update with the current timestamp after again doing a get. As the hook has already determined
+ // the needed timestamp we need to bypass here.
+ // TODO : See if HRegion.updateDeleteLatestVersionTimeStamp() could be
+ // called only if the hook is not called.
+ ctx.bypass();
+ }
+
@Override
public void postBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
@@ -844,7 +900,7 @@ private boolean checkForReservedVisibilityTagPresence(Cell cell) throws IOExcept
Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
while (tagsItr.hasNext()) {
- if (tagsItr.next().getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) {
+ if (reservedVisTagTypes.contains(tagsItr.next().getType())) {
return false;
}
}
@@ -852,28 +908,38 @@ private boolean checkForReservedVisibilityTagPresence(Cell cell) throws IOExcept
return true;
}
- private List<Tag> createVisibilityTags(String visibilityLabelsExp) throws IOException,
- ParseException, InvalidLabelException {
+ private List<Tag> createVisibilityTags(String visibilityLabelsExp, boolean addSerializationTag)
+ throws IOException, ParseException, InvalidLabelException {
ExpressionNode node = null;
node = this.expressionParser.parse(visibilityLabelsExp);
node = this.expressionExpander.expand(node);
List<Tag> tags = new ArrayList<Tag>();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
+ List<Integer> labelOrdinals = new ArrayList<Integer>();
+ // We will be adding this tag before the visibility tags and the presence of this
+ // tag indicates we are supporting deletes with cell visibility
+ if (addSerializationTag) {
+ tags.add(VisibilityUtils.VIS_SERIALIZATION_TAG);
+ }
if (node.isSingleNode()) {
- writeLabelOrdinalsToStream(node, dos);
+ getLabelOrdinals(node, labelOrdinals);
+ writeLabelOrdinalsToStream(labelOrdinals, dos);
tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray()));
baos.reset();
} else {
NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node;
if (nlNode.getOperator() == Operator.OR) {
for (ExpressionNode child : nlNode.getChildExps()) {
- writeLabelOrdinalsToStream(child, dos);
+ getLabelOrdinals(child, labelOrdinals);
+ writeLabelOrdinalsToStream(labelOrdinals, dos);
tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray()));
baos.reset();
+ labelOrdinals.clear();
}
} else {
- writeLabelOrdinalsToStream(nlNode, dos);
+ getLabelOrdinals(nlNode, labelOrdinals);
+ writeLabelOrdinalsToStream(labelOrdinals, dos);
tags.add(new Tag(VisibilityUtils.VISIBILITY_TAG_TYPE, baos.toByteArray()));
baos.reset();
}
@@ -881,7 +947,15 @@ private List<Tag> createVisibilityTags(String visibilityLabelsExp) throws IOExce
return tags;
}
- private void writeLabelOrdinalsToStream(ExpressionNode node, DataOutputStream dos)
+ private void writeLabelOrdinalsToStream(List<Integer> labelOrdinals, DataOutputStream dos)
+ throws IOException {
+ Collections.sort(labelOrdinals);
+ for (Integer labelOrdinal : labelOrdinals) {
+ StreamUtils.writeRawVInt32(dos, labelOrdinal);
+ }
+ }
+
+ private void getLabelOrdinals(ExpressionNode node, List<Integer> labelOrdinals)
throws IOException, InvalidLabelException {
if (node.isSingleNode()) {
String identifier = null;
@@ -904,11 +978,11 @@ private void writeLabelOrdinalsToStream(ExpressionNode node, DataOutputStream do
if (labelOrdinal == 0) {
throw new InvalidLabelException("Invalid visibility label " + identifier);
}
- StreamUtils.writeRawVInt32(dos, labelOrdinal);
+ labelOrdinals.add(labelOrdinal);
} else {
List<ExpressionNode> childExps = ((NonLeafExpressionNode) node).getChildExps();
for (ExpressionNode child : childExps) {
- writeLabelOrdinalsToStream(child, dos);
+ getLabelOrdinals(child, labelOrdinals);
}
}
}
@@ -949,6 +1023,22 @@ private boolean checkIfScanOrGetFromSuperUser() throws IOException {
return false;
}
+ @Override
+ public DeleteTracker postInstantiateDeleteTracker(
+ ObserverContext<RegionCoprocessorEnvironment> ctx, DeleteTracker delTracker)
+ throws IOException {
+ HRegion region = ctx.getEnvironment().getRegion();
+ TableName table = region.getRegionInfo().getTable();
+ if (table.isSystemTable()) {
+ return delTracker;
+ }
+ // We are creating a new type of delete tracker here which is able to track
+ // the timestamps and also the
+ // visibility tags per cell. The covering cells are determined not only
+ // based on the delete type and ts
+ // but also on the visibility expression matching.
+ return new VisibilityScanDeleteTracker();
+ }
@Override
public RegionScanner postScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
final Scan scan, final RegionScanner s) throws IOException {
@@ -1126,7 +1216,7 @@ public Cell postMutationBeforeWAL(ObserverContext<RegionCoprocessorEnvironment>
}
}
try {
- tags.addAll(createVisibilityTags(cellVisibility.getExpression()));
+ tags.addAll(createVisibilityTags(cellVisibility.getExpression(), true));
} catch (ParseException e) {
throw new IOException(e);
}
@@ -1416,4 +1506,22 @@ private void checkCallingUserAuth() throws IOException {
}
}
}
+
+ static class DeleteVersionVisibilityExpressionFilter extends FilterBase {
+ private List<Tag> visibilityTags;
+
+ public DeleteVersionVisibilityExpressionFilter(List<Tag> visibilityTags) {
+ this.visibilityTags = visibilityTags;
+ }
+
+ @Override
+ public ReturnCode filterKeyValue(Cell kv) throws IOException {
+ boolean matchFound = VisibilityUtils.checkForMatchingVisibilityTags(kv, visibilityTags);
+ if (matchFound) {
+ return ReturnCode.INCLUDE;
+ } else {
+ return ReturnCode.SKIP;
+ }
+ }
+ }
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
new file mode 100644
index 000000000000..c151b747f123
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
@@ -0,0 +1,276 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.security.visibility;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.Type;
+import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.regionserver.ScanDeleteTracker;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * Similar to ScanDeleteTracker but tracks the visibility expression also before
+ * deciding if a Cell can be considered deleted
+ */
[email protected]
+public class VisibilityScanDeleteTracker extends ScanDeleteTracker {
+
+  // It's better to track the visibility tags in delete based on each type. Create individual
+ // data structures for tracking each of them. This would ensure that there is no tracking based
+ // on time and also would handle all cases where deletefamily or deletecolumns is specified with
+ // Latest_timestamp. In such cases the ts in the delete marker and the masking
+ // put will not be same. So going with individual data structures for different delete
+ // type would solve this problem and also ensure that the combination of different type
+ // of deletes with diff ts would also work fine
+ // Track per TS
+ private Map<Long, List<Tag>> visibilityTagsDeleteFamily = new HashMap<Long, List<Tag>>();
+ // Delete family version with different ts and different visibility expression could come.
+ // Need to track it per ts.
+ private Map<Long,List<Tag>> visibilityTagsDeleteFamilyVersion = new HashMap<Long, List<Tag>>();
+ private List<List<Tag>> visibilityTagsDeleteColumns;
+ // Tracking as List<List> is to handle same ts cell but different visibility tag.
+ // TODO : Need to handle puts with same ts but different vis tags.
+ private List<List<Tag>> visiblityTagsDeleteColumnVersion = new ArrayList<List<Tag>>();
+
+ public VisibilityScanDeleteTracker() {
+ super();
+ }
+
+ @Override
+ public void add(Cell delCell) {
+ //Cannot call super.add because need to find if the delete needs to be considered
+ long timestamp = delCell.getTimestamp();
+ int qualifierOffset = delCell.getQualifierOffset();
+ int qualifierLength = delCell.getQualifierLength();
+ byte type = delCell.getTypeByte();
+ if (type == KeyValue.Type.DeleteFamily.getCode()) {
+ hasFamilyStamp = true;
+ //familyStamps.add(delCell.getTimestamp());
+ extractDeleteTags(delCell, KeyValue.Type.DeleteFamily);
+ return;
+ } else if (type == KeyValue.Type.DeleteFamilyVersion.getCode()) {
+ familyVersionStamps.add(timestamp);
+ extractDeleteTags(delCell, KeyValue.Type.DeleteFamilyVersion);
+ return;
+ }
+ // new column, or more general delete type
+ if (deleteBuffer != null) {
+ if (Bytes.compareTo(deleteBuffer, deleteOffset, deleteLength, delCell.getQualifierArray(),
+ qualifierOffset, qualifierLength) != 0) {
+ // A case where there are deletes for a column qualifier but there are
+ // no corresponding puts for them. Rare case.
+ visibilityTagsDeleteColumns = null;
+ visiblityTagsDeleteColumnVersion = null;
+ } else if (type == KeyValue.Type.Delete.getCode() && (deleteTimestamp != timestamp)) {
+ // there is a timestamp change which means we could clear the list
+ // when ts is same and the vis tags are different we need to collect
+ // them all. Interesting part is that in the normal case of puts if
+ // there are 2 cells with same ts and diff vis tags only one of them is
+ // returned. Handling with a single List<Tag> would mean that only one
+ // of the cell would be considered. Doing this as a precaution.
+ // Rare cases.
+ visiblityTagsDeleteColumnVersion = null;
+ }
+ }
+ deleteBuffer = delCell.getQualifierArray();
+ deleteOffset = qualifierOffset;
+ deleteLength = qualifierLength;
+ deleteType = type;
+ deleteTimestamp = timestamp;
+ extractDeleteTags(delCell, KeyValue.Type.codeToType(type));
+ }
+
+ private void extractDeleteTags(Cell delCell, Type type) {
+ // If tag is present in the delete
+ if (delCell.getTagsLength() > 0) {
+ switch (type) {
+ case DeleteFamily:
+ List<Tag> delTags = new ArrayList<Tag>();
+ if (visibilityTagsDeleteFamily != null) {
+ VisibilityUtils.getVisibilityTags(delCell, delTags);
+ if (!delTags.isEmpty()) {
+ visibilityTagsDeleteFamily.put(delCell.getTimestamp(), delTags);
+ }
+ }
+ break;
+ case DeleteFamilyVersion:
+ delTags = new ArrayList<Tag>();
+ VisibilityUtils.getVisibilityTags(delCell, delTags);
+ if (!delTags.isEmpty()) {
+ visibilityTagsDeleteFamilyVersion.put(delCell.getTimestamp(), delTags);
+ }
+ break;
+ case DeleteColumn:
+ if (visibilityTagsDeleteColumns == null) {
+ visibilityTagsDeleteColumns = new ArrayList<List<Tag>>();
+ }
+ delTags = new ArrayList<Tag>();
+ VisibilityUtils.getVisibilityTags(delCell, delTags);
+ if (!delTags.isEmpty()) {
+ visibilityTagsDeleteColumns.add(delTags);
+ }
+ break;
+ case Delete:
+ if (visiblityTagsDeleteColumnVersion == null) {
+ visiblityTagsDeleteColumnVersion = new ArrayList<List<Tag>>();
+ }
+ delTags = new ArrayList<Tag>();
+ VisibilityUtils.getVisibilityTags(delCell, delTags);
+ if (!delTags.isEmpty()) {
+ visiblityTagsDeleteColumnVersion.add(delTags);
+ }
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid delete type");
+ }
+ } else {
+ switch (type) {
+ case DeleteFamily:
+ visibilityTagsDeleteFamily = null;
+ break;
+ case DeleteFamilyVersion:
+ visibilityTagsDeleteFamilyVersion = null;
+ break;
+ case DeleteColumn:
+ visibilityTagsDeleteColumns = null;
+ break;
+ case Delete:
+ visiblityTagsDeleteColumnVersion = null;
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid delete type");
+ }
+ }
+ }
+
+ @Override
+ public DeleteResult isDeleted(Cell cell) {
+ long timestamp = cell.getTimestamp();
+ int qualifierOffset = cell.getQualifierOffset();
+ int qualifierLength = cell.getQualifierLength();
+ if (hasFamilyStamp) {
+ if (visibilityTagsDeleteFamily != null) {
+ Set<Entry<Long, List<Tag>>> deleteFamilies = visibilityTagsDeleteFamily.entrySet();
+ Iterator<Entry<Long, List<Tag>>> iterator = deleteFamilies.iterator();
+ while (iterator.hasNext()) {
+ Entry<Long, List<Tag>> entry = iterator.next();
+ if (timestamp <= entry.getKey()) {
+ boolean matchFound = VisibilityUtils.checkForMatchingVisibilityTags(cell,
+ entry.getValue());
+ if (matchFound) {
+ return DeleteResult.FAMILY_VERSION_DELETED;
+ }
+ }
+ }
+ } else {
+ if (!VisibilityUtils.isVisibilityTagsPresent(cell)) {
+ // No tags
+ return DeleteResult.FAMILY_VERSION_DELETED;
+ }
+ }
+ }
+ if (familyVersionStamps.contains(Long.valueOf(timestamp))) {
+ if (visibilityTagsDeleteFamilyVersion != null) {
+ List<Tag> tags = visibilityTagsDeleteFamilyVersion.get(Long.valueOf(timestamp));
+ if (tags != null) {
+ boolean matchFound = VisibilityUtils.checkForMatchingVisibilityTags(cell, tags);
+ if (matchFound) {
+ return DeleteResult.FAMILY_VERSION_DELETED;
+ }
+ }
+ } else {
+ if (!VisibilityUtils.isVisibilityTagsPresent(cell)) {
+ // No tags
+ return DeleteResult.FAMILY_VERSION_DELETED;
+ }
+ }
+ }
+ if (deleteBuffer != null) {
+ int ret = Bytes.compareTo(deleteBuffer, deleteOffset, deleteLength, cell.getQualifierArray(),
+ qualifierOffset, qualifierLength);
+
+ if (ret == 0) {
+ if (deleteType == KeyValue.Type.DeleteColumn.getCode()) {
+ if (visibilityTagsDeleteColumns != null) {
+ for (List<Tag> tags : visibilityTagsDeleteColumns) {
+ boolean matchFound = VisibilityUtils.checkForMatchingVisibilityTags(cell,
+ tags);
+ if (matchFound) {
+ return DeleteResult.VERSION_DELETED;
+ }
+ }
+ } else {
+ if (!VisibilityUtils.isVisibilityTagsPresent(cell)) {
+ // No tags
+ return DeleteResult.VERSION_DELETED;
+ }
+ }
+ }
+ // Delete (aka DeleteVersion)
+ // If the timestamp is the same, keep this one
+ if (timestamp == deleteTimestamp) {
+ if (visiblityTagsDeleteColumnVersion != null) {
+ for (List<Tag> tags : visiblityTagsDeleteColumnVersion) {
+ boolean matchFound = VisibilityUtils.checkForMatchingVisibilityTags(cell,
+ tags);
+ if (matchFound) {
+ return DeleteResult.VERSION_DELETED;
+ }
+ }
+ } else {
+ if (!VisibilityUtils.isVisibilityTagsPresent(cell)) {
+ // No tags
+ return DeleteResult.VERSION_DELETED;
+ }
+ }
+ }
+ } else if (ret < 0) {
+ // Next column case.
+ deleteBuffer = null;
+ visibilityTagsDeleteColumns = null;
+ visiblityTagsDeleteColumnVersion = null;
+ } else {
+ throw new IllegalStateException("isDeleted failed: deleteBuffer="
+ + Bytes.toStringBinary(deleteBuffer, deleteOffset, deleteLength) + ", qualifier="
+ + Bytes.toStringBinary(cell.getQualifierArray(), qualifierOffset, qualifierLength)
+ + ", timestamp=" + timestamp + ", comparison result: " + ret);
+ }
+ }
+ return DeleteResult.NOT_DELETED;
+ }
+
+ @Override
+ public void reset() {
+ super.reset();
+ visibilityTagsDeleteColumns = null;
+ visibilityTagsDeleteFamily = new HashMap<Long, List<Tag>>();
+ visibilityTagsDeleteFamilyVersion = new HashMap<Long, List<Tag>>();
+ visiblityTagsDeleteColumnVersion = null;
+ }
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
index 0c7764ff1175..b87f59ad5210 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
@@ -19,25 +19,31 @@
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
-import com.google.protobuf.HBaseZeroCopyByteString;
-
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.MultiUserAuthorizations;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.UserAuthorizations;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabel;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsRequest;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.util.ReflectionUtils;
+import com.google.protobuf.HBaseZeroCopyByteString;
import com.google.protobuf.InvalidProtocolBufferException;
/**
@@ -46,10 +52,14 @@
@InterfaceAudience.Private
public class VisibilityUtils {
- public static final String VISIBILITY_LABEL_GENERATOR_CLASS =
+ public static final String VISIBILITY_LABEL_GENERATOR_CLASS =
"hbase.regionserver.scan.visibility.label.generator.class";
public static final byte VISIBILITY_TAG_TYPE = TagType.VISIBILITY_TAG_TYPE;
+ public static final byte VISIBILITY_EXP_SERIALIZATION_TAG_TYPE =
+ TagType.VISIBILITY_EXP_SERIALIZATION_TAG_TYPE;
public static final String SYSTEM_LABEL = "system";
+ public static final Tag VIS_SERIALIZATION_TAG = new Tag(VISIBILITY_EXP_SERIALIZATION_TAG_TYPE,
+ VisibilityConstants.SORTED_ORDINAL_SERIALIZATION_FORMAT);
private static final String COMMA = ",";
/**
@@ -156,4 +166,162 @@ public static List<ScanLabelGenerator> getScanLabelGenerators(Configuration conf
}
return slgs;
}
+
+ /**
+ * Get the list of visibility tags in the given cell
+ * @param cell - the cell
+ * @param tags - the tags array that will be populated if
+ * visibility tags are present
+ * @return true if the tags are in sorted order.
+ */
+ public static boolean getVisibilityTags(Cell cell, List<Tag> tags) {
+ boolean sortedOrder = false;
+ Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
+ cell.getTagsLength());
+ while (tagsIterator.hasNext()) {
+ Tag tag = tagsIterator.next();
+ if(tag.getType() == VisibilityUtils.VISIBILITY_EXP_SERIALIZATION_TAG_TYPE) {
+ int serializationVersion = Bytes.toShort(tag.getValue());
+ if (serializationVersion == VisibilityConstants.VISIBILITY_SERIALIZATION_VERSION) {
+ sortedOrder = true;
+ continue;
+ }
+ }
+ if (tag.getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) {
+ tags.add(tag);
+ }
+ }
+ return sortedOrder;
+ }
+
+ /**
+ * Checks if the cell has a visibility tag
+ * @param cell
+ * @return true if found, false if not found
+ */
+ public static boolean isVisibilityTagsPresent(Cell cell) {
+ Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
+ cell.getTagsLength());
+ while (tagsIterator.hasNext()) {
+ Tag tag = tagsIterator.next();
+ if (tag.getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Checks for the matching visibility labels in the delete mutation and
+ * the cell in consideration
+ * @param cell - the cell
+ * @param visibilityTagsInDeleteCell - that list of tags in the delete mutation
+ * (the specified Cell Visibility)
+ * @return true if matching tags are found
+ */
+ public static boolean checkForMatchingVisibilityTags(Cell cell,
+ List<Tag> visibilityTagsInDeleteCell) {
+ List<Tag> tags = new ArrayList<Tag>();
+ boolean sortedTags = getVisibilityTags(cell, tags);
+ if (tags.size() == 0) {
+ // Early out if there are no tags in the cell
+ return false;
+ }
+ if (sortedTags) {
+ return checkForMatchingVisibilityTagsWithSortedOrder(visibilityTagsInDeleteCell, tags);
+ } else {
+ try {
+ return checkForMatchingVisibilityTagsWithOutSortedOrder(cell, visibilityTagsInDeleteCell);
+ } catch (IOException e) {
+ // Should not happen
+ throw new RuntimeException("Exception while sorting the tags from the cell", e);
+ }
+ }
+ }
+
+ private static boolean checkForMatchingVisibilityTagsWithOutSortedOrder(Cell cell,
+ List<Tag> visibilityTagsInDeleteCell) throws IOException {
+ List<List<Integer>> sortedDeleteTags = sortTagsBasedOnOrdinal(
+ visibilityTagsInDeleteCell);
+ List<List<Integer>> sortedTags = sortTagsBasedOnOrdinal(cell);
+ return compareTagsOrdinals(sortedDeleteTags, sortedTags);
+ }
+
+ private static boolean checkForMatchingVisibilityTagsWithSortedOrder(
+ List<Tag> visibilityTagsInDeleteCell, List<Tag> tags) {
+ boolean matchFound = false;
+ if ((visibilityTagsInDeleteCell.size()) != tags.size()) {
+ // If the size does not match. Definitely we are not comparing the
+ // equal tags.
+ // Return false in that case.
+ return matchFound;
+ }
+ for (Tag tag : visibilityTagsInDeleteCell) {
+ matchFound = false;
+ for (Tag givenTag : tags) {
+ if (Bytes.equals(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength(),
+ givenTag.getBuffer(), givenTag.getTagOffset(), givenTag.getTagLength())) {
+ matchFound = true;
+ break;
+ }
+ }
+ }
+ return matchFound;
+ }
+
+ private static List<List<Integer>> sortTagsBasedOnOrdinal(Cell cell) throws IOException {
+ Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
+ cell.getTagsLength());
+ List<List<Integer>> fullTagsList = new ArrayList<List<Integer>>();
+ while (tagsItr.hasNext()) {
+ Tag tag = tagsItr.next();
+ if (tag.getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) {
+ getSortedTagOrdinals(fullTagsList, tag);
+ }
+ }
+ return fullTagsList;
+ }
+
+ private static List<List<Integer>> sortTagsBasedOnOrdinal(List<Tag> tags) throws IOException {
+ List<List<Integer>> fullTagsList = new ArrayList<List<Integer>>();
+ for (Tag tag : tags) {
+ if (tag.getType() == VisibilityUtils.VISIBILITY_TAG_TYPE) {
+ getSortedTagOrdinals(fullTagsList, tag);
+ }
+ }
+ return fullTagsList;
+ }
+
+ private static void getSortedTagOrdinals(List<List<Integer>> fullTagsList, Tag tag)
+ throws IOException {
+ List<Integer> tagsOrdinalInSortedOrder = new ArrayList<Integer>();
+ int offset = tag.getTagOffset();
+ int endOffset = offset + tag.getTagLength();
+ while (offset < endOffset) {
+ Pair<Integer, Integer> result = StreamUtils.readRawVarint32(tag.getBuffer(), offset);
+ tagsOrdinalInSortedOrder.add(result.getFirst());
+ offset += result.getSecond();
+ }
+ Collections.sort(tagsOrdinalInSortedOrder);
+ fullTagsList.add(tagsOrdinalInSortedOrder);
+ }
+
+ private static boolean compareTagsOrdinals(List<List<Integer>> tagsInDeletes,
+ List<List<Integer>> tags) {
+ boolean matchFound = false;
+ if (tagsInDeletes.size() != tags.size()) {
+ return matchFound;
+ } else {
+ for (List<Integer> deleteTagOrdinals : tagsInDeletes) {
+ matchFound = false;
+ for (List<Integer> tagOrdinals : tags) {
+ if (deleteTagOrdinals.equals(tagOrdinals)) {
+ matchFound = true;
+ break;
+ }
+ }
+ }
+ return matchFound;
+ }
+ }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
index b5bd1a6b12cf..0c483aa080c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
@@ -42,6 +42,7 @@
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LargeTests;
+import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
@@ -50,6 +51,7 @@
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
+import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.security.visibility.ScanLabelGenerator;
import org.apache.hadoop.hbase.security.visibility.SimpleScanLabelGenerator;
import org.apache.hadoop.hbase.security.visibility.VisibilityClient;
@@ -161,6 +163,58 @@ public void testMROnTable() throws Exception {
util.deleteTable(tableName);
}
+ @Test
+ public void testMROnTableWithDeletes() throws Exception {
+ String tableName = "test-" + UUID.randomUUID();
+
+ // Prepare the arguments required for the test.
+ String[] args = new String[] {
+ "-D" + ImportTsv.MAPPER_CONF_KEY + "=org.apache.hadoop.hbase.mapreduce.TsvImporterMapper",
+ "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
+ "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
+ String data = "KEY\u001bVALUE1\u001bVALUE2\u001bsecret&private\n";
+ util.createTable(tableName, FAMILY);
+ doMROnTableTest(util, FAMILY, data, args, 1);
+ issueDeleteAndVerifyData(tableName);
+ util.deleteTable(tableName);
+ }
+
+ private void issueDeleteAndVerifyData(String tableName) throws IOException {
+ LOG.debug("Validating table after delete.");
+ HTable table = new HTable(conf, tableName);
+ boolean verified = false;
+ long pause = conf.getLong("hbase.client.pause", 5 * 1000);
+ int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
+ for (int i = 0; i < numRetries; i++) {
+ try {
+ Delete d = new Delete(Bytes.toBytes("KEY"));
+ d.deleteFamily(Bytes.toBytes(FAMILY));
+ d.setCellVisibility(new CellVisibility("private&secret"));
+ table.delete(d);
+
+ Scan scan = new Scan();
+ // Scan entire family.
+ scan.addFamily(Bytes.toBytes(FAMILY));
+ scan.setAuthorizations(new Authorizations("secret", "private"));
+ ResultScanner resScanner = table.getScanner(scan);
+ Result[] next = resScanner.next(5);
+ assertEquals(0, next.length);
+ verified = true;
+ break;
+ } catch (NullPointerException e) {
+ // If here, a cell was empty. Presume its because updates came in
+ // after the scanner had been opened. Wait a while and retry.
+ }
+ try {
+ Thread.sleep(pause);
+ } catch (InterruptedException e) {
+ // continue
+ }
+ }
+ table.close();
+ assertTrue(verified);
+ }
+
@Test
public void testMROnTableWithBulkload() throws Exception {
String tableName = "test-" + UUID.randomUUID();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
new file mode 100644
index 000000000000..d3df952f4ab7
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
@@ -0,0 +1,3029 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.security.visibility;
+
+import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+/**
+ * Tests visibility labels with deletes
+ */
+@Category(MediumTests.class)
+public class TestVisibilityLabelsWithDeletes {
+ private static final String TOPSECRET = "TOPSECRET";
+ private static final String PUBLIC = "PUBLIC";
+ private static final String PRIVATE = "PRIVATE";
+ private static final String CONFIDENTIAL = "CONFIDENTIAL";
+ private static final String SECRET = "SECRET";
+ public static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+ private static final byte[] row1 = Bytes.toBytes("row1");
+ private static final byte[] row2 = Bytes.toBytes("row2");
+ private final static byte[] fam = Bytes.toBytes("info");
+ private final static byte[] qual = Bytes.toBytes("qual");
+ private final static byte[] qual1 = Bytes.toBytes("qual1");
+ private final static byte[] qual2 = Bytes.toBytes("qual2");
+ private final static byte[] value = Bytes.toBytes("value");
+ private final static byte[] value1 = Bytes.toBytes("value1");
+ public static Configuration conf;
+
+ @Rule
+ public final TestName TEST_NAME = new TestName();
+ public static User SUPERUSER;
+
+ @BeforeClass
+ public static void setupBeforeClass() throws Exception {
+ // setup configuration
+ conf = TEST_UTIL.getConfiguration();
+ conf.setBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false);
+ conf.setInt("hfile.format.version", 3);
+ conf.set("hbase.coprocessor.master.classes", VisibilityController.class.getName());
+ conf.set("hbase.coprocessor.region.classes", VisibilityController.class.getName());
+ conf.setClass(VisibilityUtils.VISIBILITY_LABEL_GENERATOR_CLASS, SimpleScanLabelGenerator.class,
+ ScanLabelGenerator.class);
+ conf.set("hbase.superuser", "admin");
+ TEST_UTIL.startMiniCluster(2);
+ SUPERUSER = User.createUserForTesting(conf, "admin", new String[] { "supergroup" });
+
+ // Wait for the labels table to become available
+ TEST_UTIL.waitTableEnabled(LABELS_TABLE_NAME.getName(), 50000);
+ addLabels();
+ }
+
+ @AfterClass
+ public static void tearDownAfterClass() throws Exception {
+ TEST_UTIL.shutdownMiniCluster();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ }
+
+ @Test
+ public void testVisibilityLabelsWithDeleteColumns() throws Throwable {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ final HTable table = createTableAndWriteDataWithLabels(tableName, SECRET + "&" + TOPSECRET,
+ SECRET);
+ try {
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ HTable table = null;
+ try {
+ table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(TOPSECRET + "&" + SECRET));
+ d.deleteColumns(fam, qual);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ } finally {
+ table.close();
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 1);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ @Test
+ public void testVisibilityLabelsWithDeleteFamily() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ final HTable table = createTableAndWriteDataWithLabels(tableName, SECRET, CONFIDENTIAL + "|"
+ + TOPSECRET);
+ try {
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row2);
+ d.setCellVisibility(new CellVisibility(TOPSECRET + "|" + CONFIDENTIAL));
+ d.deleteFamily(fam);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 1);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ @Test
+ public void testVisibilityLabelsWithDeleteFamilyVersion() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ long[] ts = new long[] { 123l, 125l };
+ final HTable table = createTableAndWriteDataWithLabels(tableName, ts, CONFIDENTIAL + "|"
+ + TOPSECRET, SECRET);
+ try {
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ HTable table = null;
+ try {
+ table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(TOPSECRET + "|" + CONFIDENTIAL));
+ d.deleteFamilyVersion(fam, 123l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ } finally {
+ table.close();
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 1);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ @Test
+ public void testVisibilityLabelsWithDeleteColumnExactVersion() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ long[] ts = new long[] { 123l, 125l };
+ final HTable table = createTableAndWriteDataWithLabels(tableName, ts, CONFIDENTIAL + "|"
+ + TOPSECRET, SECRET);
+ try {
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ HTable table = null;
+ try {
+ table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(TOPSECRET + "|" + CONFIDENTIAL));
+ d.deleteColumn(fam, qual, 123l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ } finally {
+ table.close();
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 1);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ @Test
+ public void testVisibilityLabelsWithDeleteColumnsWithMultipleVersions() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ table = doPuts(tableName);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" +
+ SECRET + "&" + TOPSECRET+")"));
+ d.deleteColumns(fam, qual, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 2);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ @Test
+ public void testVisibilityLabelsWithDeleteColumnsWithMultipleVersionsNoTimestamp()
+ throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ table = doPuts(tableName);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+ d.deleteColumns(fam, qual);
+ table.delete(d);
+
+ d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+ d.deleteColumns(fam, qual);
+ table.delete(d);
+ table.flushCommits();
+
+ d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+ + SECRET + "&" + TOPSECRET + ")"));
+ d.deleteColumns(fam, qual);
+ table.delete(d);
+ table.flushCommits();
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 1);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ @Test
+ public void
+ testVisibilityLabelsWithDeleteColumnsWithNoMatchVisExpWithMultipleVersionsNoTimestamp()
+ throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ table = doPuts(tableName);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+ d.deleteColumns(fam, qual);
+ table.delete(d);
+
+ d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(SECRET));
+ d.deleteColumns(fam, qual);
+ table.delete(d);
+ table.flushCommits();
+
+ d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+ + SECRET + "&" + TOPSECRET + ")"));
+ d.deleteColumns(fam, qual);
+ table.delete(d);
+ table.flushCommits();
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 2);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ @Test
+ public void testVisibilityLabelsWithDeleteFamilyWithMultipleVersionsNoTimestamp()
+ throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ table = doPuts(tableName);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+ d.deleteFamily(fam);
+ table.delete(d);
+
+ d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+ d.deleteFamily(fam);
+ table.delete(d);
+ table.flushCommits();
+
+ d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+ + SECRET + "&" + TOPSECRET + ")"));
+ d.deleteFamily(fam);
+ table.delete(d);
+ table.flushCommits();
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 1);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ @Test
+ public void testVisibilityLabelsWithDeleteFamilyWithPutsReAppearing() throws Exception {
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ HBaseAdmin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
+ HColumnDescriptor colDesc = new HColumnDescriptor(fam);
+ colDesc.setMaxVersions(5);
+ HTableDescriptor desc = new HTableDescriptor(tableName);
+ desc.addFamily(colDesc);
+ hBaseAdmin.createTable(desc);
+ table = new HTable(conf, tableName);
+ Put put = new Put(Bytes.toBytes("row1"));
+ put.add(fam, qual, value);
+ put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+ table.put(put);
+ put = new Put(Bytes.toBytes("row1"));
+ put.add(fam, qual, value);
+ put.setCellVisibility(new CellVisibility(SECRET));
+ table.put(put);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+ d.deleteFamily(fam);
+ table.delete(d);
+ table.flushCommits();
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertEquals(next.length, 1);
+ put = new Put(Bytes.toBytes("row1"));
+ put.add(fam, qual, value1);
+ put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+ table.put(put);
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(SECRET));
+ d.deleteFamily(fam);
+ table.delete(d);
+ table.flushCommits();
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(CONFIDENTIAL));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertEquals(next.length, 1);
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET));
+ scanner = table.getScanner(s);
+ Result[] next1 = scanner.next(3);
+ assertEquals(next1.length, 0);
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+  // Verifies that a deleteColumns marker only masks cells whose visibility expression
+  // matches the marker's, and that a newer put under the same label re-appears and can
+  // then be deleted again under that label.
+  // NOTE(review): assertEquals arguments here are (actual, expected); JUnit expects
+  // (expected, actual), so failure messages will be misleading.
+  @Test
+  public void testVisibilityLabelsWithDeleteColumnsWithPutsReAppearing() throws Exception {
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      HBaseAdmin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
+      HColumnDescriptor colDesc = new HColumnDescriptor(fam);
+      colDesc.setMaxVersions(5);
+      HTableDescriptor desc = new HTableDescriptor(tableName);
+      desc.addFamily(colDesc);
+      hBaseAdmin.createTable(desc);
+      table = new HTable(conf, tableName);
+      // Two puts on the same cell: one CONFIDENTIAL, one SECRET.
+      Put put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual, value);
+      put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+      table.put(put);
+      put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual, value);
+      put.setCellVisibility(new CellVisibility(SECRET));
+      table.put(put);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      // Delete only the CONFIDENTIAL version.
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+            d.deleteColumns(fam, qual);
+            table.delete(d);
+            table.flushCommits();
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+      // The SECRET version must survive the CONFIDENTIAL delete.
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertEquals(next.length, 1);
+      // Re-put under CONFIDENTIAL: newer than the delete marker, so it re-appears.
+      put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual, value1);
+      put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+      table.put(put);
+      actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET));
+            d.deleteColumns(fam, qual);
+            table.delete(d);
+            table.flushCommits();
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+      s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(CONFIDENTIAL));
+      scanner = table.getScanner(s);
+      next = scanner.next(3);
+      assertEquals(next.length, 1);
+      // The SECRET version is now gone.
+      s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET));
+      scanner = table.getScanner(s);
+      Result[] next1 = scanner.next(3);
+      assertEquals(next1.length, 0);
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+
+  // Combines a deleteColumns (all versions <= ts 126, SECRET) with a version-specific
+  // deleteColumn (ts 123, CONFIDENTIAL): together they must mask both puts (ts 123/124).
+  // NOTE(review): timestamps use a lowercase 'l' long suffix (123l); prefer 'L' to avoid
+  // confusion with the digit 1.
+  @Test
+  public void testVisibilityCombinations() throws Exception {
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      HBaseAdmin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
+      HColumnDescriptor colDesc = new HColumnDescriptor(fam);
+      colDesc.setMaxVersions(5);
+      HTableDescriptor desc = new HTableDescriptor(tableName);
+      desc.addFamily(colDesc);
+      hBaseAdmin.createTable(desc);
+      table = new HTable(conf, tableName);
+      Put put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual, 123l, value);
+      put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+      table.put(put);
+      put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual, 124l, value1);
+      put.setCellVisibility(new CellVisibility(SECRET));
+      table.put(put);
+      table.flushCommits();
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET));
+            d.deleteColumns(fam, qual, 126l);
+            table.delete(d);
+
+            table = new HTable(conf, TEST_NAME.getMethodName());
+            d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+            d.deleteColumn(fam, qual, 123l);
+            table.delete(d);
+            table.flushCommits();
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+      // Even with both authorizations, no cell should remain visible.
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(CONFIDENTIAL, SECRET));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertEquals(next.length, 0);
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+  // Two puts share the same timestamp (123) but differ in visibility; a version-specific
+  // deleteColumn issued once per label must remove both, leaving nothing visible.
+  @Test
+  public void testVisibilityLabelsWithDeleteColumnWithSpecificVersionWithPutsReAppearing()
+      throws Exception {
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      HBaseAdmin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
+      HColumnDescriptor colDesc = new HColumnDescriptor(fam);
+      colDesc.setMaxVersions(5);
+      HTableDescriptor desc = new HTableDescriptor(tableName);
+      desc.addFamily(colDesc);
+      hBaseAdmin.createTable(desc);
+      table = new HTable(conf, tableName);
+      // Same row/qualifier/timestamp, different visibility expressions.
+      Put put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual, 123l, value);
+      put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+      table.put(put);
+      put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual, 123l, value1);
+      put.setCellVisibility(new CellVisibility(SECRET));
+      table.put(put);
+      table.flushCommits();
+      //TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(CONFIDENTIAL, SECRET));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertEquals(next.length, 1);
+      // Delete ts 123 under each label in turn.
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+            d.deleteColumn(fam, qual, 123l);
+            table.delete(d);
+
+            table = new HTable(conf, TEST_NAME.getMethodName());
+            d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET));
+            d.deleteColumn(fam, qual, 123l);
+            table.delete(d);
+            table.flushCommits();
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+      s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(CONFIDENTIAL));
+      scanner = table.getScanner(s);
+      next = scanner.next(3);
+      assertEquals(next.length, 0);
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+
+  // Three family-level deletes on row1 under CONFIDENTIAL, SECRET, and one compound
+  // expression: only versions whose expression matches a delete are masked, so both rows
+  // still surface (row1 keeps its non-matching versions; row2 was never deleted).
+  @Test
+  public void
+  testVisibilityLabelsWithDeleteFamilyWithNoMatchingVisExpWithMultipleVersionsNoTimestamp()
+      throws Exception {
+    setAuths();
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      table = doPuts(tableName);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+            d.deleteFamily(fam);
+            table.delete(d);
+
+            d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET));
+            d.deleteFamily(fam);
+            table.delete(d);
+            table.flushCommits();
+
+            d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+                + SECRET + "&" + TOPSECRET + ")"));
+            d.deleteFamily(fam);
+            table.delete(d);
+            table.flushCommits();
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertTrue(next.length == 2);
+      CellScanner cellScanner = next[0].cellScanner();
+      cellScanner.advance();
+      Cell current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      cellScanner = next[1].cellScanner();
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row2, 0, row2.length));
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+
+  // A deleteFamily carrying NO visibility expression does not mask labelled cells, and a
+  // deleteColumns under SECRET&TOPSECRET masks only the ts-125 version carrying exactly
+  // that expression; row1 keeps ts 127/126/124/123 and row2 is untouched.
+  @Test
+  public void testDeleteFamilyAndDeleteColumnsWithAndWithoutVisibilityExp() throws Exception {
+    setAuths();
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      table = doPuts(tableName);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.deleteFamily(fam);
+            table.delete(d);
+
+            d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+            d.deleteColumns(fam, qual);
+            table.delete(d);
+            table.flushCommits();
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertTrue(next.length == 2);
+      // Surviving versions of row1, newest first: 127, 126, 124, 123.
+      CellScanner cellScanner = next[0].cellScanner();
+      cellScanner.advance();
+      Cell current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 127l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 126l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 124l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 123l);
+      cellScanner = next[1].cellScanner();
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row2, 0, row2.length));
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+
+  // Creates the test table (max 5 versions) and seeds row1 with five versions of fam:qual
+  // at ts 123..127 under varying visibility expressions (123: CONFIDENTIAL;
+  // 125: SECRET&TOPSECRET; 124/126/127: (CONFIDENTIAL&PRIVATE)|(TOPSECRET&SECRET)),
+  // flushes, then adds row2 at ts 127. Caller owns (and must close) the returned HTable.
+  private HTable doPuts(TableName tableName) throws IOException, InterruptedIOException,
+      RetriesExhaustedWithDetailsException, InterruptedException {
+    HTable table;
+    HBaseAdmin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
+    HColumnDescriptor colDesc = new HColumnDescriptor(fam);
+    colDesc.setMaxVersions(5);
+    HTableDescriptor desc = new HTableDescriptor(tableName);
+    desc.addFamily(colDesc);
+    hBaseAdmin.createTable(desc);
+    table = new HTable(conf, tableName);
+    Put put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 123l, value);
+    put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 124l, value);
+    put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+        + TOPSECRET + "&" + SECRET+")"));
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 125l, value);
+    put.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 126l, value);
+    put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+        + TOPSECRET + "&" + SECRET+")"));
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 127l, value);
+    put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+        + TOPSECRET + "&" + SECRET+")"));
+    table.put(put);
+    TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+    put = new Put(Bytes.toBytes("row2"));
+    put.add(fam, qual, 127l, value);
+    put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET
+        + "&" + SECRET + ")"));
+    table.put(put);
+    return table;
+  }
+
+  // Like doPuts, but spreads writes across three qualifiers in row1: fam:qual at
+  // ts 123/124/125, fam:qual1 at ts 126 and fam:qual2 at ts 127, each with its own
+  // visibility expression. No flush; caller owns (and must close) the returned HTable.
+  private HTable doPutsWithDiffCols(TableName tableName) throws IOException,
+      InterruptedIOException, RetriesExhaustedWithDetailsException, InterruptedException {
+    HTable table;
+    HBaseAdmin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
+    HColumnDescriptor colDesc = new HColumnDescriptor(fam);
+    colDesc.setMaxVersions(5);
+    HTableDescriptor desc = new HTableDescriptor(tableName);
+    desc.addFamily(colDesc);
+    hBaseAdmin.createTable(desc);
+    table = new HTable(conf, tableName);
+    Put put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 123l, value);
+    put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 124l, value);
+    put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+        + TOPSECRET + "&" + SECRET+")"));
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 125l, value);
+    put.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual1, 126l, value);
+    put.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual2, 127l, value);
+    put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+        + TOPSECRET + "&" + SECRET+")"));
+    table.put(put);
+    return table;
+  }
+
+  // Same data layout as doPuts (row1 ts 123..127, flush, row2 ts 127) but with NO
+  // visibility expressions on any cell. Caller owns (and must close) the returned HTable.
+  private HTable doPutsWithoutVisibility(TableName tableName) throws IOException,
+      InterruptedIOException, RetriesExhaustedWithDetailsException, InterruptedException {
+    HTable table;
+    HBaseAdmin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
+    HColumnDescriptor colDesc = new HColumnDescriptor(fam);
+    colDesc.setMaxVersions(5);
+    HTableDescriptor desc = new HTableDescriptor(tableName);
+    desc.addFamily(colDesc);
+    hBaseAdmin.createTable(desc);
+    table = new HTable(conf, tableName);
+    Put put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 123l, value);
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 124l, value);
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 125l, value);
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 126l, value);
+    table.put(put);
+    put = new Put(Bytes.toBytes("row1"));
+    put.add(fam, qual, 127l, value);
+    table.put(put);
+    TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+    put = new Put(Bytes.toBytes("row2"));
+    put.add(fam, qual, 127l, value);
+    table.put(put);
+    return table;
+  }
+
+
+  // A version-specific deleteColumn at ts 125 whose expression does not exactly match the
+  // ts-125 cell's expression must delete nothing: all five versions of row1 remain.
+  // (Visibility deletes require an exact expression match, not logical equivalence.)
+  @Test
+  public void testDeleteColumnWithSpecificTimeStampUsingMultipleVersionsUnMatchingVisExpression()
+      throws Exception {
+    setAuths();
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      table = doPuts(tableName);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" +
+                SECRET + "&" + TOPSECRET+")"));
+            d.deleteColumn(fam, qual, 125l);
+            table.delete(d);
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertTrue(next.length == 2);
+      // All versions of row1 remain, newest first: 127, 126, 125, 124, 123.
+      CellScanner cellScanner = next[0].cellScanner();
+      cellScanner.advance();
+      Cell current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 127l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 126l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 125l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 124l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 123l);
+      cellScanner = next[1].cellScanner();
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row2, 0, row2.length));
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+
+  // deleteColumn with no timestamp targets the latest matching version only: with
+  // expression SECRET&TOPSECRET, the ts-125 cell is removed and 127/126/124/123 survive.
+  @Test
+  public void testDeleteColumnWithLatestTimeStampUsingMultipleVersions() throws Exception {
+    setAuths();
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      table = doPuts(tableName);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+            d.deleteColumn(fam, qual);
+            table.delete(d);
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertTrue(next.length == 2);
+      CellScanner cellScanner = next[0].cellScanner();
+      cellScanner.advance();
+      Cell current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 127l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 126l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 124l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 123l);
+      cellScanner = next[1].cellScanner();
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row2, 0, row2.length));
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+
+  // When the SECRET delete marker matches no existing version, nothing is masked
+  // (versions 128..124 of row1 remain); a subsequent SECRET put at ts 129, being newer
+  // than the marker, is visible immediately.
+  @Test (timeout=180000)
+  public void testDeleteColumnWithLatestTimeStampWhenNoVersionMatches() throws Exception {
+    setAuths();
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      table = doPuts(tableName);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      Put put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual, 128l, value);
+      put.setCellVisibility(new CellVisibility(TOPSECRET));
+      table.put(put);
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET ));
+            d.deleteColumn(fam, qual);
+            table.delete(d);
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertTrue(next.length == 2);
+      // Five visible versions of row1 (max versions = 5), newest first: 128..124.
+      CellScanner cellScanner = next[0].cellScanner();
+      cellScanner.advance();
+      Cell current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 128l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 127l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 126l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 125l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 124l);
+      cellScanner = next[1].cellScanner();
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row2, 0, row2.length));
+
+      // Put a SECRET version at ts 129: newer than the delete marker, so it shows up.
+      put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual, 129l, value);
+      put.setCellVisibility(new CellVisibility(SECRET));
+      table.put(put);
+      table.flushCommits();
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+      scanner = table.getScanner(s);
+      next = scanner.next(3);
+      assertTrue(next.length == 2);
+      cellScanner = next[0].cellScanner();
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 129l);
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+  // Same latest-version delete as above, but followed by a flush and a major compaction:
+  // the delete's effect (ts 125 masked) must persist after the marker is compacted away.
+  @Test
+  public void testDeleteColumnWithLatestTimeStampUsingMultipleVersionsAfterCompaction()
+      throws Exception {
+    setAuths();
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      table = doPuts(tableName);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+            d.deleteColumn(fam, qual);
+            table.delete(d);
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      Put put = new Put(Bytes.toBytes("row3"));
+      put.add(fam, qual, 127l, value);
+      put.setCellVisibility(new CellVisibility(CONFIDENTIAL + "&" + PRIVATE));
+      table.put(put);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      TEST_UTIL.getHBaseAdmin().majorCompact(tableName.getNameAsString());
+      // Sleep to ensure compaction happens. Need to do it in a better way
+      // NOTE(review): a fixed sleep is flaky; prefer polling the compaction state.
+      Thread.sleep(5000);
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertTrue(next.length == 3);
+      CellScanner cellScanner = next[0].cellScanner();
+      cellScanner.advance();
+      Cell current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 127l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 126l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 124l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 123l);
+      cellScanner = next[1].cellScanner();
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row2, 0, row2.length));
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+
+  // deleteFamily under SECRET&TOPSECRET masks every row1 version at or below the marker's
+  // (latest) timestamp whose expression matches the deletes; only ts 127 and 126 survive.
+  // NOTE(review): method name has a typo ("Mulipl­eVersions") but renaming would break
+  // result tracking; left as-is.
+  @Test
+  public void testDeleteFamilyLatestTimeStampWithMulipleVersions() throws Exception {
+    setAuths();
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      table = doPuts(tableName);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+            d.deleteFamily(fam);
+            table.delete(d);
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertTrue(next.length == 2);
+      CellScanner cellScanner = next[0].cellScanner();
+      cellScanner.advance();
+      Cell current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 127l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 126l);
+      cellScanner = next[1].cellScanner();
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row2, 0, row2.length));
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+
+  // Against doPutsWithDiffCols data: deleteColumns(fam, qual, 125) under SECRET&TOPSECRET
+  // masks only the matching fam:qual version (ts 125); fam:qual 124/123, fam:qual1 126 and
+  // fam:qual2 127 all remain.
+  @Test
+  public void testDeleteColumnswithMultipleColumnsWithMultipleVersions() throws Exception {
+    setAuths();
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      table = doPutsWithDiffCols(tableName);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+            d.deleteColumns(fam, qual, 125l);
+            table.delete(d);
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertTrue(next.length == 1);
+      CellScanner cellScanner = next[0].cellScanner();
+      cellScanner.advance();
+      Cell current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 124l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 123l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+          current.getQualifierLength(), qual1, 0, qual1.length));
+      assertEquals(current.getTimestamp(), 126l);
+      cellScanner.advance();
+      current = cellScanner.current();
+      assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+          current.getRowLength(), row1, 0, row1.length));
+      assertEquals(current.getTimestamp(), 127l);
+      assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+          current.getQualifierLength(), qual2, 0, qual2.length));
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+
+  // Data lives only in fam:qual1 (ts 125 CONFIDENTIAL, ts 126 SECRET). The SECRET delete
+  // targets fam:qual (no data, so a no-op) and the CONFIDENTIAL delete removes the ts-125
+  // qual1 version, leaving the ts-126 SECRET version visible.
+  @Test
+  public void testDeleteColumnsWithDiffColsAndTags() throws Exception {
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      HBaseAdmin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
+      HColumnDescriptor colDesc = new HColumnDescriptor(fam);
+      colDesc.setMaxVersions(5);
+      HTableDescriptor desc = new HTableDescriptor(tableName);
+      desc.addFamily(colDesc);
+      hBaseAdmin.createTable(desc);
+      table = new HTable(conf, tableName);
+      Put put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual1, 125l, value);
+      put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+      table.put(put);
+      put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual1, 126l, value);
+      put.setCellVisibility(new CellVisibility(SECRET));
+      table.put(put);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET));
+            d.deleteColumns(fam, qual, 126l);
+            table.delete(d);
+            d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+            d.deleteColumns(fam, qual1, 125l);
+            table.delete(d);
+            table.flushCommits();
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertEquals(next.length, 1);
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+  // Variant of the previous test: both deletes use ts 126. The CONFIDENTIAL delete on
+  // fam:qual1 masks only the matching ts-125 version (<= 126), while the SECRET delete on
+  // fam:qual matches nothing; the SECRET ts-126 qual1 cell stays visible.
+  @Test
+  public void testDeleteColumnsWithDiffColsAndTags1() throws Exception {
+    TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+    HTable table = null;
+    try {
+      HBaseAdmin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
+      HColumnDescriptor colDesc = new HColumnDescriptor(fam);
+      colDesc.setMaxVersions(5);
+      HTableDescriptor desc = new HTableDescriptor(tableName);
+      desc.addFamily(colDesc);
+      hBaseAdmin.createTable(desc);
+      table = new HTable(conf, tableName);
+      Put put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual1, 125l, value);
+      put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+      table.put(put);
+      put = new Put(Bytes.toBytes("row1"));
+      put.add(fam, qual1, 126l, value);
+      put.setCellVisibility(new CellVisibility(SECRET));
+      table.put(put);
+      TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+      PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          try {
+            HTable table = new HTable(conf, TEST_NAME.getMethodName());
+            Delete d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(SECRET));
+            d.deleteColumns(fam, qual, 126l);
+            table.delete(d);
+            d = new Delete(row1);
+            d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+            d.deleteColumns(fam, qual1, 126l);
+            table.delete(d);
+            table.flushCommits();
+          } catch (Throwable t) {
+            throw new IOException(t);
+          }
+          return null;
+        }
+      };
+      SUPERUSER.runAs(actiona);
+      Scan s = new Scan();
+      s.setMaxVersions(5);
+      s.setAuthorizations(new Authorizations(SECRET, CONFIDENTIAL));
+      ResultScanner scanner = table.getScanner(s);
+      Result[] next = scanner.next(3);
+      assertEquals(next.length, 1);
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+  }
+ // A family delete issued WITHOUT any cell visibility should remove all of row1's cells
+ // (which were also written without visibility by doPutsWithoutVisibility), leaving only
+ // row2 visible to a fully-authorized scan.
+ @Test
+ public void testDeleteFamilyWithoutCellVisibilityWithMulipleVersions() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ table = doPutsWithoutVisibility(tableName);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ // No setCellVisibility here — the delete carries no labels on purpose.
+ d.deleteFamily(fam);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 1);
+ // All cells wrt row1 should be deleted as we are not passing the Cell Visibility
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ // The sole surviving row must be row2.
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ // Puts are written WITHOUT visibility, but the family delete carries SECRET&TOPSECRET.
+ // Since no stored cell matches that visibility, the delete should remove nothing:
+ // all five row1 versions (ts 123..127) plus row2 must remain visible.
+ @Test
+ public void testDeleteFamilyLatestTimeStampWithMulipleVersionsWithoutCellVisibilityInPuts()
+ throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ table = doPutsWithoutVisibility(tableName);
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+ d.deleteFamily(fam);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 2);
+ // row1 should still expose every version, newest first: 127, 126, 125, 124, 123.
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ // row2 remains untouched.
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ // deleteFamily with an explicit timestamp (126) and a compound visibility expression:
+ // cells at ts <= 126 whose labels match "(PRIVATE&CONFIDENTIAL)|(SECRET&TOPSECRET)"
+ // are removed; the surviving row1 versions should be 127, 125 and 123, plus row2.
+ @Test
+ public void testDeleteFamilySpecificTimeStampWithMulipleVersions() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ table = doPuts(tableName);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+ + SECRET + "&" + TOPSECRET + ")"));
+ d.deleteFamily(fam, 126l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(6);
+ assertTrue(next.length == 2);
+ // row1 keeps ts 127 (newer than the delete) and the non-matching 125 and 123.
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ // Verifies that visibility-scoped delete markers are honored across a major compaction:
+ // after deleteFamily(fam, 126) plus an extra put (row3) and a compaction, the scan must
+ // still return the three rows with the expected surviving row1 version (ts 127).
+ @Test
+ public void testScanAfterCompaction() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ table = doPuts(tableName);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|(" +
+ SECRET + "&" + TOPSECRET+")"));
+ d.deleteFamily(fam, 126l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Put put = new Put(Bytes.toBytes("row3"));
+ put.add(fam, qual, 127l, value);
+ put.setCellVisibility(new CellVisibility(CONFIDENTIAL + "&" + PRIVATE));
+ table.put(put);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ TEST_UTIL.getHBaseAdmin().compact(tableName.getNameAsString());
+ // NOTE(review): fixed sleep is flaky/slow — the author's comment below already flags
+ // it; polling the compaction state would be more reliable.
+ Thread.sleep(5000);
+ // Sleep to ensure compaction happens. Need to do it in a better way
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 3);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ // Issues two successive timestamped family deletes (ts 125 then ts 127) with compound
+ // visibility expressions, without flushing between puts and deletes, and checks the
+ // exact surviving versions after each delete.
+ @Test
+ public void testDeleteFamilySpecificTimeStampWithMulipleVersionsDoneTwice() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ // Do not flush here.
+ table = doPuts(tableName);
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+ + TOPSECRET + "&" + SECRET+")"));
+ d.deleteFamily(fam, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 2);
+ // After the first delete, row1 keeps ts 127, 126, 125, 123.
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+
+ // Issue 2nd delete
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ + TOPSECRET + "&" + SECRET+")"));
+ d.deleteFamily(fam, 127l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertTrue(next.length == 2);
+ // After the second delete (ts <= 127), only ts 125 and 123 survive for row1.
+ cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ assertEquals(current.getTimestamp(), 127l);
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ // Two deleteFamilyVersion calls (ts 123 under CONFIDENTIAL, ts 125 under SECRET&TOPSECRET)
+ // should each remove exactly one version; the scan then expects row1 versions 127, 126, 124.
+ @Test
+ public void testMultipleDeleteFamilyVersionWithDiffLabels() throws Exception {
+ PrivilegedExceptionAction<VisibilityLabelsResponse> action =
+ new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ public VisibilityLabelsResponse run() throws Exception {
+ try {
+ return VisibilityClient.setAuths(conf, new String[] { CONFIDENTIAL, PRIVATE, SECRET },
+ SUPERUSER.getShortName());
+ // NOTE(review): Throwable swallowed silently — an auth-setup failure would
+ // surface later as a confusing assertion failure; consider logging/rethrowing.
+ } catch (Throwable e) {
+ }
+ return null;
+ }
+ };
+ // NOTE(review): 'response' is never used — could drop the assignment.
+ VisibilityLabelsResponse response = SUPERUSER.runAs(action);
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = doPuts(tableName);
+ try {
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+ d.deleteFamilyVersion(fam, 123l);
+ table.delete(d);
+ d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+ d.deleteFamilyVersion(fam, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(5);
+ assertTrue(next.length == 2);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ // First a specific deleteColumn(ts 126) and a deleteFamilyVersion(ts 125), then a plain
+ // deleteFamily under CONFIDENTIAL; verifies the survivors after each step (127/124/123,
+ // then 127/124 once the CONFIDENTIAL-labelled cells are gone).
+ @Test (timeout=180000)
+ public void testSpecificDeletesFollowedByDeleteFamily() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = doPuts(tableName);
+ try {
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ + TOPSECRET + "&" + SECRET + ")"));
+ d.deleteColumn(fam, qual, 126l);
+ table.delete(d);
+ d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+ d.deleteFamilyVersion(fam, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(5);
+ assertTrue(next.length == 2);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ // Issue 2nd delete
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+ d.deleteFamily(fam);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(5);
+ assertTrue(next.length == 2);
+ cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ // Variant of the previous test: deleteColumn WITHOUT a timestamp (latest version only)
+ // followed by deleteFamilyVersion(125) and then deleteFamily under CONFIDENTIAL;
+ // expects survivors 126/124/123 after step one, 126/124 after step two.
+ @Test(timeout = 180000)
+ public void testSpecificDeletesFollowedByDeleteFamily1() throws Exception {
+ PrivilegedExceptionAction<VisibilityLabelsResponse> action =
+ new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ public VisibilityLabelsResponse run() throws Exception {
+ try {
+ return VisibilityClient.setAuths(conf, new String[] { CONFIDENTIAL, PRIVATE, SECRET },
+ SUPERUSER.getShortName());
+ // NOTE(review): Throwable swallowed silently — consider logging/rethrowing.
+ } catch (Throwable e) {
+ }
+ return null;
+ }
+ };
+ // NOTE(review): 'response' is never used — could drop the assignment.
+ VisibilityLabelsResponse response = SUPERUSER.runAs(action);
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = doPuts(tableName);
+ try {
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ + TOPSECRET + "&" + SECRET + ")"));
+ // No timestamp: targets only the latest matching version.
+ d.deleteColumn(fam, qual);
+ table.delete(d);
+
+ d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+ d.deleteFamilyVersion(fam, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(5);
+ assertTrue(next.length == 2);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ // Issue 2nd delete
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+ d.deleteFamily(fam);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(5);
+ assertTrue(next.length == 2);
+ cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ // Two successive timestamped deleteColumn calls (ts 125 under SECRET&TOPSECRET, then
+ // ts 127 under a compound expression), no flush before the first delete; validates the
+ // exact surviving row1 versions after each step and that row2 is never affected.
+ @Test
+ public void testDeleteColumnSpecificTimeStampWithMulipleVersionsDoneTwice() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ // Do not flush here.
+ table = doPuts(tableName);
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+ d.deleteColumn(fam, qual, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 2);
+ // After the first delete, row1 keeps ts 127, 126, 124, 123 (125 removed).
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+
+ // Issue 2nd delete
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ + TOPSECRET + "&" + SECRET+")"));
+ d.deleteColumn(fam, qual, 127l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertTrue(next.length == 2);
+ // After the second delete, 127 is also gone: survivors are 126, 124, 123.
+ cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ assertEquals(current.getTimestamp(), 127l);
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ // Both deletes target the SAME timestamp (127): first under the compound expression
+ // (removing the matching 127 cell), then under SECRET&TOPSECRET (nothing left at 127 to
+ // match) — the survivor set 126/125/124/123 must be identical after both steps.
+ @Test
+ public void testDeleteColumnSpecificTimeStampWithMulipleVersionsDoneTwice1() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ // Do not flush here.
+ table = doPuts(tableName);
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")" +
+ "|(" + TOPSECRET + "&" + SECRET + ")"));
+ d.deleteColumn(fam, qual, 127l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 2);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+
+ // Issue 2nd delete
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+ d.deleteColumn(fam, qual, 127l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertTrue(next.length == 2);
+ // Second delete is a no-op: identical survivor set as above.
+ cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ assertEquals(current.getTimestamp(), 127l);
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+ @Test
+ public void testDeleteColumnSpecificTimeStampWithMulipleVersionsDoneTwice2() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ // Do not flush here.
+ table = doPuts(tableName);
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+ + TOPSECRET + "&" + SECRET+")"));
+ d.deleteColumn(fam, qual, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 2);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+
+ // Issue 2nd delete
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ + TOPSECRET + "&" + SECRET+")"));
+ d.deleteColumn(fam, qual, 127l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertTrue(next.length == 2);
+ cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ assertEquals(current.getTimestamp(), 127l);
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+ @Test
+ public void testDeleteColumnAndDeleteFamilylSpecificTimeStampWithMulipleVersion()
+ throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ // Do not flush here.
+ table = doPuts(tableName);
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
+ d.deleteColumn(fam, qual, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 2);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+
+ // Issue 2nd delete
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ + TOPSECRET + "&" + SECRET+")"));
+ d.deleteFamily(fam, 124l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertTrue(next.length == 2);
+ cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ assertEquals(current.getTimestamp(), 127l);
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ private void setAuths() throws IOException, InterruptedException {
+ PrivilegedExceptionAction<VisibilityLabelsResponse> action =
+ new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ public VisibilityLabelsResponse run() throws Exception {
+ try {
+ return VisibilityClient.setAuths(conf, new String[] { CONFIDENTIAL, PRIVATE, SECRET,
+ TOPSECRET }, SUPERUSER.getShortName());
+ } catch (Throwable e) {
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(action);
+ }
+
+ @Test
+ public void testDiffDeleteTypesForTheSameCellUsingMultipleVersions() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ // Do not flush here.
+ table = doPuts(tableName);
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + PRIVATE + "&" + CONFIDENTIAL + ")|("
+ + TOPSECRET + "&" + SECRET+")"));
+ d.deleteColumns(fam, qual, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 2);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+
+ // Issue 2nd delete
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ + TOPSECRET + "&" + SECRET+")"));
+ d.deleteColumn(fam, qual, 127l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertTrue(next.length == 2);
+ cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row2, 0, row2.length));
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ @Test
+ public void testDeleteColumnLatestWithNoCellVisibility() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ table = doPuts(tableName);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.deleteColumn(fam, qual, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 2);
+ scanAll(next);
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.deleteColumns(fam, qual, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertTrue(next.length == 2);
+ scanAll(next);
+
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.deleteFamily(fam, 125l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertTrue(next.length == 2);
+ scanAll(next);
+
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.deleteFamily(fam);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertTrue(next.length == 2);
+ scanAll(next);
+
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.deleteColumns(fam, qual);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertTrue(next.length == 2);
+ scanAll(next);
+
+ actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.deleteFamilyVersion(fam, 126l);
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ scanner = table.getScanner(s);
+ next = scanner.next(3);
+ assertTrue(next.length == 2);
+ scanAll(next);
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ private void scanAll(Result[] next) throws IOException {
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(),
+ row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 127l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(),
+ row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 126l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(),
+ row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 125l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(),
+ row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(),
+ row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ cellScanner = next[1].cellScanner();
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), current.getRowLength(),
+ row2, 0, row2.length));
+ }
+
+ @Test
+ public void testVisibilityExpressionWithNotEqualORCondition() throws Exception {
+ setAuths();
+ TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
+ HTable table = null;
+ try {
+ HBaseAdmin hBaseAdmin = TEST_UTIL.getHBaseAdmin();
+ HColumnDescriptor colDesc = new HColumnDescriptor(fam);
+ colDesc.setMaxVersions(5);
+ HTableDescriptor desc = new HTableDescriptor(tableName);
+ desc.addFamily(colDesc);
+ hBaseAdmin.createTable(desc);
+ table = new HTable(conf, tableName);
+ Put put = new Put(Bytes.toBytes("row1"));
+ put.add(fam, qual, 123l, value);
+ put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
+ table.put(put);
+ put = new Put(Bytes.toBytes("row1"));
+ put.add(fam, qual, 124l, value);
+ put.setCellVisibility(new CellVisibility(CONFIDENTIAL + "|" + PRIVATE));
+ table.put(put);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ try {
+ HTable table = new HTable(conf, TEST_NAME.getMethodName());
+ Delete d = new Delete(row1);
+ d.deleteColumn(fam, qual, 124l);
+ d.setCellVisibility(new CellVisibility(PRIVATE ));
+ table.delete(d);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(actiona);
+
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ Scan s = new Scan();
+ s.setMaxVersions(5);
+ s.setAuthorizations(new Authorizations(SECRET, PRIVATE, CONFIDENTIAL, TOPSECRET));
+ ResultScanner scanner = table.getScanner(s);
+ Result[] next = scanner.next(3);
+ assertTrue(next.length == 1);
+ CellScanner cellScanner = next[0].cellScanner();
+ cellScanner.advance();
+ Cell current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 124l);
+ cellScanner.advance();
+ current = cellScanner.current();
+ assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
+ current.getRowLength(), row1, 0, row1.length));
+ assertEquals(current.getTimestamp(), 123l);
+ } finally {
+ if (table != null) {
+ table.close();
+ }
+ }
+ }
+
+ public static HTable createTableAndWriteDataWithLabels(TableName tableName, String... labelExps)
+ throws Exception {
+ HTable table = null;
+ table = TEST_UTIL.createTable(tableName, fam);
+ int i = 1;
+ List<Put> puts = new ArrayList<Put>();
+ for (String labelExp : labelExps) {
+ Put put = new Put(Bytes.toBytes("row" + i));
+ put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
+ put.setCellVisibility(new CellVisibility(labelExp));
+ puts.add(put);
+ table.put(put);
+ i++;
+ }
+ // table.put(puts);
+ return table;
+ }
+
+ public static HTable createTableAndWriteDataWithLabels(TableName tableName, long[] timestamp,
+ String... labelExps) throws Exception {
+ HTable table = null;
+ table = TEST_UTIL.createTable(tableName, fam);
+ int i = 1;
+ List<Put> puts = new ArrayList<Put>();
+ for (String labelExp : labelExps) {
+ Put put = new Put(Bytes.toBytes("row" + i));
+ put.add(fam, qual, timestamp[i - 1], value);
+ put.setCellVisibility(new CellVisibility(labelExp));
+ puts.add(put);
+ table.put(put);
+ TEST_UTIL.getHBaseAdmin().flush(tableName.getNameAsString());
+ i++;
+ }
+ return table;
+ }
+
+ public static void addLabels() throws Exception {
+ PrivilegedExceptionAction<VisibilityLabelsResponse> action =
+ new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
+ public VisibilityLabelsResponse run() throws Exception {
+ String[] labels = { SECRET, TOPSECRET, CONFIDENTIAL, PUBLIC, PRIVATE };
+ try {
+ VisibilityClient.addLabels(conf, labels);
+ } catch (Throwable t) {
+ throw new IOException(t);
+ }
+ return null;
+ }
+ };
+ SUPERUSER.runAs(action);
+ }
+}
|
9cd153c55aa071528a013ecd28014734346d4127
|
drools
|
Changes to resolve issues discovered in integration- tests--git-svn-id: https://svn.jboss.org/repos/labs/trunk/labs/jbossrules@3441 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java
index 4d01a0107dc..163b8d19d8e 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java
@@ -1,6 +1,7 @@
package org.drools.integrationtests;
import java.io.InputStreamReader;
+import java.util.ArrayList;
import java.util.List;
import org.drools.Cheese;
@@ -16,53 +17,150 @@ protected RuleBase getRuleBase() throws Exception {
return new org.drools.leaps.RuleBaseImpl();
}
- /**
- * Leaps query requires fireAll run before any probing can be done. this
- * test mirrors one in IntegrationCases.java with addition of call to
- * workingMemory.fireAll to facilitate query execution
- */
+ /**
+ * Leaps query requires fireAll run before any probing can be done. this
+ * test mirrors one in IntegrationCases.java with addition of call to
+ * workingMemory.fireAll to facilitate query execution
+ */
public void testQuery() throws Exception {
PackageBuilder builder = new PackageBuilder();
- builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "simple_query_test.drl" ) ) );
+ builder.addPackageFromDrl(new InputStreamReader(getClass()
+ .getResourceAsStream("simple_query_test.drl")));
Package pkg = builder.getPackage();
-
+
RuleBase ruleBase = getRuleBase();
- ruleBase.addPackage( pkg );
+ ruleBase.addPackage(pkg);
WorkingMemory workingMemory = ruleBase.newWorkingMemory();
-
+
Cheese stilton = new Cheese("stinky", 5);
- workingMemory.assertObject( stilton );
- workingMemory.fireAllRules();
- List results = workingMemory.getQueryResults( "simple query" );
- assertEquals(1, results.size());
+ workingMemory.assertObject(stilton);
+ workingMemory.fireAllRules();// <=== the only difference from the base test case
+ List results = workingMemory.getQueryResults("simple query");
+ assertEquals(1, results.size());
}
-
+
+ /**
+ * leaps does not create activations upfront hence its inability to apply
+ * auto-focus predicate in the same way as reteoo does. activations in
+ * reteoo sense created in the order rules would fire based what used to be
+ * called conflict resolution.
+ *
+ * So, while agenda groups feature works it mirrors reteoo behaviour up to
+ * the point where auto-focus comes into play. At this point leaps and
+ * reteoo are different at which point auto-focus should "fire".
+ *
+ * the other problem that relates to the lack of activations before rules
+ * start firing is that agenda group is removed from focus stack when agenda
+ * group is empty. This also affects module / focus behaviour
+ */
public void testAgendaGroups() throws Exception {
- //not implemented yet
+ PackageBuilder builder = new PackageBuilder();
+ builder.addPackageFromDrl(new InputStreamReader(getClass()
+ .getResourceAsStream("test_AgendaGroups.drl")));
+ Package pkg = builder.getPackage();
+
+ RuleBase ruleBase = getRuleBase();
+ ruleBase.addPackage(pkg);
+ WorkingMemory workingMemory = ruleBase.newWorkingMemory();
+
+ List list = new ArrayList();
+ workingMemory.setGlobal("list", list);
+
+ Cheese brie = new Cheese("brie", 12);
+ workingMemory.assertObject(brie);
+
+ workingMemory.fireAllRules();
+
+ assertEquals(3, list.size());
+
+ assertEquals("MAIN", list.get(0)); // salience 10
+ assertEquals("group3", list.get(1)); // salience 5. set auto focus to
+ // group 3
+ // no group 3 activations at this point, pop it, next activation that
+ // can fire is MAIN
+ assertEquals("MAIN", list.get(2));
+ // assertEquals( "group2", list.get( 3 ) );
+ // assertEquals( "group4", list.get( 4 ) );
+ // assertEquals( "group1", list.get( 5 ) );
+ // assertEquals( "group3", list.get( 6 ) );
+ // assertEquals( "group1", list.get( 7 ) );
+
+ workingMemory.setFocus("group2");
+ workingMemory.fireAllRules();
+
+ assertEquals(4, list.size());
+ assertEquals("group2", list.get(3));
}
+ /**
+ * exception test are leaps specific due to the fact that left hand side of
+ * the rule is not being evaluated until fireAllRules is called. Otherwise
+ * the test cases are exactly the same
+ */
public void testEvalException() throws Exception {
- //not implemented yet
+ PackageBuilder builder = new PackageBuilder();
+ builder.addPackageFromDrl(new InputStreamReader(getClass()
+ .getResourceAsStream("test_EvalException.drl")));
+ Package pkg = builder.getPackage();
+
+ RuleBase ruleBase = getRuleBase();
+ ruleBase.addPackage(pkg);
+ WorkingMemory workingMemory = ruleBase.newWorkingMemory();
+
+ Cheese brie = new Cheese("brie", 12);
+
+ try {
+ workingMemory.assertObject(brie);
+ workingMemory.fireAllRules(); // <=== the only difference from the base test case
+ fail("Should throw an Exception from the Eval");
+ } catch (Exception e) {
+ assertEquals("this should throw an exception", e.getCause()
+ .getMessage());
+ }
}
public void testPredicateException() throws Exception {
- //not implemented yet
+ PackageBuilder builder = new PackageBuilder();
+ builder.addPackageFromDrl(new InputStreamReader(getClass()
+ .getResourceAsStream("test_PredicateException.drl")));
+ Package pkg = builder.getPackage();
+
+ RuleBase ruleBase = getRuleBase();
+ ruleBase.addPackage(pkg);
+ WorkingMemory workingMemory = ruleBase.newWorkingMemory();
+
+ Cheese brie = new Cheese("brie", 12);
+
+ try {
+ workingMemory.assertObject(brie);
+ workingMemory.fireAllRules(); // <=== the only difference from the base test case
+ fail("Should throw an Exception from the Predicate");
+ } catch (Exception e) {
+ assertEquals("this should throw an exception", e.getCause()
+ .getMessage());
+ }
}
public void testReturnValueException() throws Exception {
- //not implemented yet
- }
+ PackageBuilder builder = new PackageBuilder();
+ builder.addPackageFromDrl(new InputStreamReader(getClass()
+ .getResourceAsStream("test_ReturnValueException.drl")));
+ Package pkg = builder.getPackage();
- public void testDurationWithNoLoop() {
- //not implemented yet
- }
+ RuleBase ruleBase = getRuleBase();
+ ruleBase.addPackage(pkg);
+ WorkingMemory workingMemory = ruleBase.newWorkingMemory();
- public void testNoLoop() throws Exception {
- //not implemented yet
- }
-
- public void testDynamicFunction() {
- //ERRROR HERE !
+ Cheese brie = new Cheese("brie", 12);
+
+ try {
+ workingMemory.assertObject(brie);
+ workingMemory.fireAllRules(); // <=== the only difference from the base test case
+ fail("Should throw an Exception from the ReturnValue");
+ } catch (Exception e) {
+ assertEquals("this should throw an exception", e.getCause()
+ .getMessage());
+ }
}
}
diff --git a/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java b/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
index 002eca68fb7..c75f8edb6b6 100644
--- a/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
+++ b/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
@@ -78,13 +78,11 @@ abstract public class AbstractWorkingMemory
/** The actual memory for the <code>JoinNode</code>s. */
private final PrimitiveLongMap nodeMemories = new PrimitiveLongMap( 32,
8 );
-
- /** Application data which is associated with this memory. */
- protected final Map applicationData = new HashMap();
-
/** Handle-to-object mapping. */
protected final PrimitiveLongMap objects = new PrimitiveLongMap( 32,
8 );
+ /** Global values which are associated with this memory. */
+ private final Map globals = new HashMap();
/** Object-to-handle mapping. */
protected final Map identityMap = new IdentityMap();
@@ -164,14 +162,14 @@ FactHandle newFactHandle() {
* @see WorkingMemory
*/
public Map getGlobals() {
- return this.applicationData;
+ return this.globals;
}
/**
* @see WorkingMemory
*/
public Object getGlobal(String name) {
- return this.applicationData.get( name );
+ return this.globals.get( name );
}
/**
diff --git a/drools-core/src/main/java/org/drools/leaps/FactTable.java b/drools-core/src/main/java/org/drools/leaps/FactTable.java
index 4be84e10208..83c0468c27e 100644
--- a/drools-core/src/main/java/org/drools/leaps/FactTable.java
+++ b/drools-core/src/main/java/org/drools/leaps/FactTable.java
@@ -20,8 +20,9 @@
import java.util.Iterator;
import java.util.Set;
+import org.drools.common.PropagationContextImpl;
import org.drools.leaps.util.Table;
-import org.drools.leaps.util.TableOutOfBoundException;
+import org.drools.spi.PropagationContext;
/**
* Implementation of a container to store data elements used throughout the
@@ -41,14 +42,13 @@ class FactTable extends Table {
* dynamic rule management support. used to push facts on stack again after
* fireAllRules by working memory and adding of a new rule after that
*/
- private boolean reseededStack = false;
+ private boolean reseededStack = false;
/**
- * Tuples that are either already on agenda or are very close (missing exists or
- * have not facts matching)
+ * Tuples that are either already on agenda or are very close (missing
+ * exists or have not facts matching)
*/
-
- private final Set tuples;
+ private final Set tuples;
/**
* initializes base LeapsTable with appropriate Comparator and positive and
@@ -58,8 +58,8 @@ class FactTable extends Table {
* @param ruleConflictResolver
*/
public FactTable(ConflictResolver conflictResolver) {
- super( conflictResolver.getFactConflictResolver() );
- this.rules = new RuleTable( conflictResolver.getRuleConflictResolver() );
+ super(conflictResolver.getFactConflictResolver());
+ this.rules = new RuleTable(conflictResolver.getRuleConflictResolver());
this.tuples = new HashSet();
}
@@ -69,11 +69,10 @@ public FactTable(ConflictResolver conflictResolver) {
* @param workingMemory
* @param ruleHandle
*/
- public void addRule(WorkingMemoryImpl workingMemory,
- RuleHandle ruleHandle) {
- this.rules.add( ruleHandle );
+ public void addRule(WorkingMemoryImpl workingMemory, RuleHandle ruleHandle) {
+ this.rules.add(ruleHandle);
// push facts back to stack if needed
- this.checkAndAddFactsToStack( workingMemory );
+ this.checkAndAddFactsToStack(workingMemory);
}
/**
@@ -82,7 +81,7 @@ public void addRule(WorkingMemoryImpl workingMemory,
* @param ruleHandle
*/
public void removeRule(RuleHandle ruleHandle) {
- this.rules.remove( ruleHandle );
+ this.rules.remove(ruleHandle);
}
/**
@@ -95,20 +94,21 @@ public void removeRule(RuleHandle ruleHandle) {
*
*/
private void checkAndAddFactsToStack(WorkingMemoryImpl workingMemory) {
- if ( this.reseededStack ) {
- this.setReseededStack( false );
+ if (this.reseededStack) {
+ this.setReseededStack(false);
+
+ PropagationContextImpl context = new PropagationContextImpl(
+ workingMemory.nextPropagationIdCounter(),
+ PropagationContext.ASSERTION, null, null);
+
// let's only add facts below waterline - added before rule is added
// rest would be added to stack automatically
- Handle factHandle = new FactHandleImpl( workingMemory.getIdLastFireAllAt(),
- null );
- try {
- for ( Iterator it = this.tailIterator( factHandle,
- factHandle ); it.hasNext(); ) {
- workingMemory.pushTokenOnStack( new Token( workingMemory,
- (FactHandleImpl) it.next() ) );
- }
- } catch ( TableOutOfBoundException e ) {
- // should never get here
+ Handle factHandle = new FactHandleImpl(workingMemory
+ .getIdLastFireAllAt(), null);
+ for (Iterator it = this.tailIterator(factHandle, factHandle); it
+ .hasNext();) {
+ workingMemory.pushTokenOnStack(new Token(workingMemory,
+ (FactHandleImpl) it.next(), context));
}
}
}
@@ -139,22 +139,23 @@ public Iterator getRulesIterator() {
public String toString() {
StringBuffer ret = new StringBuffer();
- for ( Iterator it = this.iterator(); it.hasNext(); ) {
+ for (Iterator it = this.iterator(); it.hasNext();) {
FactHandleImpl handle = (FactHandleImpl) it.next();
- ret.append( "\n" + handle + "[" + handle.getObject() + "]" );
+ ret.append("\n" + handle + "[" + handle.getObject() + "]");
}
- ret.append( "\nTuples :" );
+ ret.append("\nTuples :");
- for ( Iterator it = this.tuples.iterator(); it.hasNext(); ) {
- ret.append( "\n" + it.next() );
+ for (Iterator it = this.tuples.iterator(); it.hasNext();) {
+ ret.append("\n" + it.next());
}
- ret.append( "\nRules :" );
+ ret.append("\nRules :");
- for ( Iterator it = this.rules.iterator(); it.hasNext(); ) {
+ for (Iterator it = this.rules.iterator(); it.hasNext();) {
RuleHandle handle = (RuleHandle) it.next();
- ret.append( "\n\t" + handle.getLeapsRule().getRule().getName() + "[dominant - " + handle.getDominantPosition() + "]" );
+ ret.append("\n\t" + handle.getLeapsRule().getRule().getName()
+ + "[dominant - " + handle.getDominantPosition() + "]");
}
return ret.toString();
@@ -165,10 +166,10 @@ Iterator getTuplesIterator() {
}
boolean addTuple(LeapsTuple tuple) {
- return this.tuples.add( tuple );
+ return this.tuples.add(tuple);
}
void removeTuple(LeapsTuple tuple) {
- this.tuples.remove( tuple );
+ this.tuples.remove(tuple);
}
}
diff --git a/drools-core/src/main/java/org/drools/leaps/LeapsRule.java b/drools-core/src/main/java/org/drools/leaps/LeapsRule.java
index 5748b92ce5a..6179ced1b48 100644
--- a/drools-core/src/main/java/org/drools/leaps/LeapsRule.java
+++ b/drools-core/src/main/java/org/drools/leaps/LeapsRule.java
@@ -18,6 +18,8 @@
import java.util.ArrayList;
+import org.drools.common.ActivationQueue;
+import org.drools.common.AgendaGroupImpl;
import org.drools.rule.EvalCondition;
import org.drools.rule.Rule;
@@ -29,7 +31,7 @@
*
*/
class LeapsRule {
- Rule rule;
+ Rule rule;
final ColumnConstraints[] columnConstraints;
@@ -37,43 +39,45 @@ class LeapsRule {
final ColumnConstraints[] existsColumnConstraints;
- final EvalCondition[] evalConditions;
+ final EvalCondition[] evalConditions;
- boolean notColumnsPresent;
+ boolean notColumnsPresent;
- boolean existsColumnsPresent;
+ boolean existsColumnsPresent;
- boolean evalCoditionsPresent;
+ boolean evalCoditionsPresent;
- final Class[] existsNotsClasses;
+ final Class[] existsNotsClasses;
- public LeapsRule(Rule rule,
- ArrayList columns,
- ArrayList notColumns,
- ArrayList existsColumns,
- ArrayList evalConditions) {
+ public LeapsRule(Rule rule, ArrayList columns, ArrayList notColumns,
+ ArrayList existsColumns, ArrayList evalConditions) {
this.rule = rule;
- this.columnConstraints = (ColumnConstraints[]) columns.toArray( new ColumnConstraints[0] );
- this.notColumnConstraints = (ColumnConstraints[]) notColumns.toArray( new ColumnConstraints[0] );
- this.existsColumnConstraints = (ColumnConstraints[]) existsColumns.toArray( new ColumnConstraints[0] );
- this.evalConditions = (EvalCondition[]) evalConditions.toArray( new EvalCondition[0] );
+ this.columnConstraints = (ColumnConstraints[]) columns
+ .toArray(new ColumnConstraints[0]);
+ this.notColumnConstraints = (ColumnConstraints[]) notColumns
+ .toArray(new ColumnConstraints[0]);
+ this.existsColumnConstraints = (ColumnConstraints[]) existsColumns
+ .toArray(new ColumnConstraints[0]);
+ this.evalConditions = (EvalCondition[]) evalConditions
+ .toArray(new EvalCondition[0]);
this.notColumnsPresent = (this.notColumnConstraints.length != 0);
this.existsColumnsPresent = (this.existsColumnConstraints.length != 0);
this.evalCoditionsPresent = (this.evalConditions.length != 0);
ArrayList classes = new ArrayList();
- for ( int i = 0; i < this.notColumnConstraints.length; i++ ) {
- if ( classes.contains( this.notColumnConstraints[i].getClassType() ) ) {
- classes.add( this.notColumnConstraints[i].getClassType() );
+ for (int i = 0; i < this.notColumnConstraints.length; i++) {
+ if (classes.contains(this.notColumnConstraints[i].getClassType())) {
+ classes.add(this.notColumnConstraints[i].getClassType());
}
}
- for ( int i = 0; i < this.existsColumnConstraints.length; i++ ) {
- if ( !classes.contains( this.existsColumnConstraints[i].getClassType() ) ) {
- classes.add( this.existsColumnConstraints[i].getClassType() );
+ for (int i = 0; i < this.existsColumnConstraints.length; i++) {
+ if (!classes.contains(this.existsColumnConstraints[i]
+ .getClassType())) {
+ classes.add(this.existsColumnConstraints[i].getClassType());
}
}
- this.existsNotsClasses = (Class[]) classes.toArray( new Class[0] );
+ this.existsNotsClasses = (Class[]) classes.toArray(new Class[0]);
}
Rule getRule() {
@@ -139,4 +143,30 @@ public boolean equals(Object that) {
Class[] getExistsNotColumnsClasses() {
return this.existsNotsClasses;
}
+
+ /**
+ * to simulate terminal node memory we introduce
+ * TerminalNodeMemory type attributes here
+ *
+ */
+ private AgendaGroupImpl agendaGroup;
+
+ private ActivationQueue lifo;
+
+ public ActivationQueue getLifo() {
+ return this.lifo;
+ }
+
+ public void setLifo(ActivationQueue lifo) {
+ this.lifo = lifo;
+ }
+
+ public AgendaGroupImpl getAgendaGroup() {
+ return this.agendaGroup;
+ }
+
+ public void setAgendaGroup(AgendaGroupImpl agendaGroup) {
+ this.agendaGroup = agendaGroup;
+ }
+
}
diff --git a/drools-core/src/main/java/org/drools/leaps/LeapsTuple.java b/drools-core/src/main/java/org/drools/leaps/LeapsTuple.java
index dcbbf1ea3da..1db7fa6032f 100644
--- a/drools-core/src/main/java/org/drools/leaps/LeapsTuple.java
+++ b/drools-core/src/main/java/org/drools/leaps/LeapsTuple.java
@@ -32,52 +32,51 @@
*
* @author Alexander Bagerman
*/
-class LeapsTuple
- implements
- Tuple,
- Serializable {
- private static final long serialVersionUID = 1L;
+class LeapsTuple implements Tuple, Serializable {
+ private static final long serialVersionUID = 1L;
private final PropagationContext context;
- private boolean readyForActivation;
+ private boolean readyForActivation;
- private final FactHandleImpl[] factHandles;
+ private final FactHandleImpl[] factHandles;
- private Set[] notFactHandles;
+ private Set[] notFactHandles;
- private Set[] existsFactHandles;
+ private Set[] existsFactHandles;
- private Set logicalDependencies;
+ private Set logicalDependencies;
- private Activation activation;
+ private Activation activation;
- private final LeapsRule leapsRule;
+ private final LeapsRule leapsRule;
/**
* agendaItem parts
*/
- LeapsTuple(FactHandleImpl factHandles[],
- LeapsRule leapsRule,
- PropagationContext context) {
+ LeapsTuple(FactHandleImpl factHandles[], LeapsRule leapsRule,
+ PropagationContext context) {
this.factHandles = factHandles;
this.leapsRule = leapsRule;
this.context = context;
- if ( this.leapsRule != null && this.leapsRule.containsNotColumns() ) {
- this.notFactHandles = new HashSet[this.leapsRule.getNotColumnConstraints().length];
+ if (this.leapsRule != null && this.leapsRule.containsNotColumns()) {
+ this.notFactHandles = new HashSet[this.leapsRule
+ .getNotColumnConstraints().length];
}
- if ( this.leapsRule != null && this.leapsRule.containsExistsColumns() ) {
- this.existsFactHandles = new HashSet[this.leapsRule.getExistsColumnConstraints().length];
+ if (this.leapsRule != null && this.leapsRule.containsExistsColumns()) {
+ this.existsFactHandles = new HashSet[this.leapsRule
+ .getExistsColumnConstraints().length];
}
- this.readyForActivation = (this.leapsRule == null || !this.leapsRule.containsExistsColumns());
+ this.readyForActivation = (this.leapsRule == null || !this.leapsRule
+ .containsExistsColumns());
}
/**
* get rule that caused this tuple to be generated
*
- * @return rule
+ * @return rule
*/
LeapsRule getLeapsRule() {
return this.leapsRule;
@@ -95,8 +94,8 @@ LeapsRule getLeapsRule() {
* @see org.drools.spi.Tuple
*/
public boolean dependsOn(FactHandle handle) {
- for ( int i = 0, length = this.factHandles.length; i < length; i++ ) {
- if ( handle.equals( this.factHandles[i] ) ) {
+ for (int i = 0, length = this.factHandles.length; i < length; i++) {
+ if (handle.equals(this.factHandles[i])) {
return true;
}
}
@@ -114,7 +113,7 @@ public FactHandle get(int col) {
* @see org.drools.spi.Tuple
*/
public FactHandle get(Declaration declaration) {
- return this.get( declaration.getColumn() );
+ return this.get(declaration.getColumn());
}
/**
@@ -149,21 +148,21 @@ Activation getActivation() {
* @see java.lang.Object
*/
public boolean equals(Object object) {
- if ( this == object ) {
+ if (this == object) {
return true;
}
- if ( object == null || !(object instanceof LeapsTuple) ) {
+ if (object == null || !(object instanceof LeapsTuple)) {
return false;
}
FactHandle[] thatFactHandles = ((LeapsTuple) object).getFactHandles();
- if ( thatFactHandles.length != this.factHandles.length ) {
+ if (thatFactHandles.length != this.factHandles.length) {
return false;
}
- for ( int i = 0, length = this.factHandles.length; i < length; i++ ) {
- if ( !this.factHandles[i].equals( thatFactHandles[i] ) ) {
+ for (int i = 0, length = this.factHandles.length; i < length; i++) {
+ if (!this.factHandles[i].equals(thatFactHandles[i])) {
return false;
}
@@ -184,67 +183,66 @@ boolean isReadyForActivation() {
* @see java.lang.Object
*/
public String toString() {
- StringBuffer buffer = new StringBuffer( "LeapsTuple [" + this.context.getRuleOrigin().getName() + "] " );
+ StringBuffer buffer = new StringBuffer("LeapsTuple ["
+ + this.leapsRule.getRule().getName() + "] ");
- for ( int i = 0, length = this.factHandles.length; i < length; i++ ) {
- buffer.append( ((i == 0) ? "" : ", ") + this.factHandles[i] );
+ for (int i = 0, length = this.factHandles.length; i < length; i++) {
+ buffer.append(((i == 0) ? "" : ", ") + this.factHandles[i]);
}
- if ( this.existsFactHandles != null ) {
- buffer.append( "\nExists fact handles by position" );
- for ( int i = 0, length = this.existsFactHandles.length; i < length; i++ ) {
- buffer.append( "\nposition " + i );
- for ( Iterator it = this.existsFactHandles[i].iterator(); it.hasNext(); ) {
- buffer.append( "\n\t" + it.next() );
+ if (this.existsFactHandles != null) {
+ buffer.append("\nExists fact handles by position");
+ for (int i = 0, length = this.existsFactHandles.length; i < length; i++) {
+ buffer.append("\nposition " + i);
+ for (Iterator it = this.existsFactHandles[i].iterator(); it
+ .hasNext();) {
+ buffer.append("\n\t" + it.next());
}
}
}
- if ( this.notFactHandles != null ) {
- buffer.append( "\nNot fact handles by position" );
- for ( int i = 0, length = this.notFactHandles.length; i < length; i++ ) {
- buffer.append( "\nposition " + i );
- for ( Iterator it = this.notFactHandles[i].iterator(); it.hasNext(); ) {
- buffer.append( "\n\t" + it.next() );
+ if (this.notFactHandles != null) {
+ buffer.append("\nNot fact handles by position");
+ for (int i = 0, length = this.notFactHandles.length; i < length; i++) {
+ buffer.append("\nposition " + i);
+ for (Iterator it = this.notFactHandles[i].iterator(); it
+ .hasNext();) {
+ buffer.append("\n\t" + it.next());
}
}
}
return buffer.toString();
}
- void addNotFactHandle(FactHandle factHandle,
- int index) {
+ void addNotFactHandle(FactHandle factHandle, int index) {
this.readyForActivation = false;
Set facts = this.notFactHandles[index];
- if ( facts == null ) {
+ if (facts == null) {
facts = new HashSet();
this.notFactHandles[index] = facts;
}
- facts.add( factHandle );
+ facts.add(factHandle);
}
- void removeNotFactHandle(FactHandle factHandle,
- int index) {
- if ( this.notFactHandles[index] != null ) {
- this.notFactHandles[index].remove( factHandle );
+ void removeNotFactHandle(FactHandle factHandle, int index) {
+ if (this.notFactHandles[index] != null) {
+ this.notFactHandles[index].remove(factHandle);
}
this.setReadyForActivation();
}
- void addExistsFactHandle(FactHandle factHandle,
- int index) {
+ void addExistsFactHandle(FactHandle factHandle, int index) {
Set facts = this.existsFactHandles[index];
- if ( facts == null ) {
+ if (facts == null) {
facts = new HashSet();
this.existsFactHandles[index] = facts;
}
- facts.add( factHandle );
+ facts.add(factHandle);
this.setReadyForActivation();
}
- void removeExistsFactHandle(FactHandle factHandle,
- int index) {
- if ( this.existsFactHandles[index] != null ) {
- this.existsFactHandles[index].remove( factHandle );
+ void removeExistsFactHandle(FactHandle factHandle, int index) {
+ if (this.existsFactHandles[index] != null) {
+ this.existsFactHandles[index].remove(factHandle);
}
this.setReadyForActivation();
}
@@ -252,16 +250,18 @@ void removeExistsFactHandle(FactHandle factHandle,
private void setReadyForActivation() {
this.readyForActivation = true;
- if ( this.notFactHandles != null ) {
- for ( int i = 0, length = this.notFactHandles.length; i < length && this.readyForActivation; i++ ) {
- if ( this.notFactHandles[i].size() > 0 ) {
+ if (this.notFactHandles != null) {
+ for (int i = 0, length = this.notFactHandles.length; i < length
+ && this.readyForActivation; i++) {
+ if (this.notFactHandles[i].size() > 0) {
this.readyForActivation = false;
}
}
}
- if ( this.existsFactHandles != null ) {
- for ( int i = 0, length = this.existsFactHandles.length; i < length && this.readyForActivation; i++ ) {
- if ( this.existsFactHandles[i].size() == 0 ) {
+ if (this.existsFactHandles != null) {
+ for (int i = 0, length = this.existsFactHandles.length; i < length
+ && this.readyForActivation; i++) {
+ if (this.existsFactHandles[i].size() == 0) {
this.readyForActivation = false;
}
}
@@ -273,14 +273,14 @@ PropagationContext getContext() {
}
void addLogicalDependency(FactHandle handle) {
- if ( this.logicalDependencies == null ) {
+ if (this.logicalDependencies == null) {
this.logicalDependencies = new HashSet();
}
- this.logicalDependencies.add( handle );
+ this.logicalDependencies.add(handle);
}
Iterator getLogicalDependencies() {
- if ( this.logicalDependencies != null ) {
+ if (this.logicalDependencies != null) {
return this.logicalDependencies.iterator();
}
return null;
diff --git a/drools-core/src/main/java/org/drools/leaps/RuleBaseImpl.java b/drools-core/src/main/java/org/drools/leaps/RuleBaseImpl.java
index 2784906ebef..b5d0e71c94e 100644
--- a/drools-core/src/main/java/org/drools/leaps/RuleBaseImpl.java
+++ b/drools-core/src/main/java/org/drools/leaps/RuleBaseImpl.java
@@ -17,7 +17,6 @@
*/
import java.util.HashMap;
-import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -30,6 +29,7 @@
import org.drools.WorkingMemory;
import org.drools.rule.InvalidPatternException;
import org.drools.rule.Package;
+import org.drools.rule.PackageCompilationData;
import org.drools.rule.Rule;
import org.drools.spi.FactHandleFactory;
@@ -40,23 +40,20 @@
* @author Alexander Bagerman
*
*/
-public class RuleBaseImpl
- implements
- RuleBase {
- private static final long serialVersionUID = 1487738104393155409L;
+public class RuleBaseImpl implements RuleBase {
+ private static final long serialVersionUID = 1487738104393155409L;
- private HashMap leapsRules = new HashMap();
+ private HashMap leapsRules = new HashMap();
/**
* The fact handle factory.
*/
- private final HandleFactory factHandleFactory;
+ private final FactHandleFactory factHandleFactory;
- private Set rulesPackages;
+ private Map globalDeclarations;
- private Map applicationData;
+ private Map rulesPackages;
- // @todo: replace this with a weak HashSet
/**
* WeakHashMap to keep references of WorkingMemories but allow them to be
* garbage collected
@@ -64,20 +61,17 @@ public class RuleBaseImpl
private final transient Map workingMemories;
/** Special value when adding to the underlying map. */
- private static final Object PRESENT = new Object();
+ private static final Object PRESENT = new Object();
/**
* Construct.
*
* @param rete
* The rete network.
- * @throws PackageIntegrationException
+ * @throws PackageIntegrationException
*/
public RuleBaseImpl() throws PackageIntegrationException {
- this( new HandleFactory(),
- new HashSet(),
- new HashMap() );
-
+ this(new HandleFactory());
}
/**
@@ -91,50 +85,43 @@ public RuleBaseImpl() throws PackageIntegrationException {
* The fact handle factory.
* @param pkgs
* @param applicationData
- * @throws PackageIntegrationException
- * @throws Exception
+ * @throws PackageIntegrationException
+ * @throws Exception
*/
- public RuleBaseImpl(FactHandleFactory factHandleFactory,
- Set pkgs,
- Map applicationData) throws PackageIntegrationException {
- // because we can deal only with leaps fact handle factory
+ public RuleBaseImpl(FactHandleFactory factHandleFactory) {
+ // casting to make sure that it's leaps handle factory
this.factHandleFactory = (HandleFactory) factHandleFactory;
- this.rulesPackages = pkgs;
- this.applicationData = applicationData;
+ this.globalDeclarations = new HashMap();
this.workingMemories = new WeakHashMap();
- this.rulesPackages = new HashSet();
- for ( Iterator it = pkgs.iterator(); it.hasNext(); ) {
- this.addPackage( (Package) it.next() );
- }
+ this.rulesPackages = new HashMap();
}
/**
* @see RuleBase
*/
public WorkingMemory newWorkingMemory() {
- return newWorkingMemory( true );
+ return newWorkingMemory(true);
}
/**
* @see RuleBase
*/
public WorkingMemory newWorkingMemory(boolean keepReference) {
- WorkingMemoryImpl workingMemory = new WorkingMemoryImpl( this );
+ WorkingMemoryImpl workingMemory = new WorkingMemoryImpl(this);
// add all rules added so far
- for ( Iterator it = this.leapsRules.values().iterator(); it.hasNext(); ) {
- workingMemory.addLeapsRules( (List) it.next() );
+ for (Iterator it = this.leapsRules.values().iterator(); it.hasNext();) {
+ workingMemory.addLeapsRules((List) it.next());
}
//
- if ( keepReference ) {
- this.workingMemories.put( workingMemory,
- RuleBaseImpl.PRESENT );
+ if (keepReference) {
+ this.workingMemories.put(workingMemory, RuleBaseImpl.PRESENT);
}
return workingMemory;
}
void disposeWorkingMemory(WorkingMemory workingMemory) {
- this.workingMemories.remove( workingMemory );
+ this.workingMemories.remove(workingMemory);
}
/**
@@ -157,12 +144,14 @@ public FactHandleFactory newFactHandleFactory() {
/**
* @see RuleBase
*/
+
public Package[] getPackages() {
- return (Package[]) this.rulesPackages.toArray( new Package[this.rulesPackages.size()] );
+ return (Package[]) this.rulesPackages.values().toArray(
+ new Package[this.rulesPackages.size()]);
}
- public Map getApplicationData() {
- return this.applicationData;
+ public Map getGlobalDeclarations() {
+ return this.globalDeclarations;
}
/**
@@ -172,32 +161,74 @@ public Map getApplicationData() {
*
* @param rulesPackage
* The rule-set to add.
- * @throws PackageIntegrationException
+ * @throws PackageIntegrationException
*
* @throws FactException
* @throws InvalidPatternException
*/
- public void addPackage(Package rulesPackage) throws PackageIntegrationException {
- rulesPackage.checkValidity();
- Map newApplicationData = rulesPackage.getGlobals();
+ public void addPackage(Package newPackage)
+ throws PackageIntegrationException {
+ newPackage.checkValidity();
+ Package pkg = (Package) this.rulesPackages.get(newPackage.getName());
+ if (pkg != null) {
+ mergePackage(pkg, newPackage);
+ } else {
+ this.rulesPackages.put(newPackage.getName(), newPackage);
+ }
- // Check that the application data is valid, we cannot change the type
- // of an already declared application data variable
- for ( Iterator it = newApplicationData.keySet().iterator(); it.hasNext(); ) {
+ Map newGlobals = newPackage.getGlobals();
+
+ // Check that the global data is valid, we cannot change the type
+ // of an already declared global variable
+ for (Iterator it = newGlobals.keySet().iterator(); it.hasNext();) {
String identifier = (String) it.next();
- Class type = (Class) newApplicationData.get( identifier );
- if ( this.applicationData.containsKey( identifier ) && !this.applicationData.get( identifier ).equals( type ) ) {
- throw new PackageIntegrationException( rulesPackage );
+ Class type = (Class) newGlobals.get(identifier);
+ if (this.globalDeclarations.containsKey(identifier)
+ && !this.globalDeclarations.get(identifier).equals(type)) {
+ throw new PackageIntegrationException(pkg);
}
}
- this.applicationData.putAll( newApplicationData );
+ this.globalDeclarations.putAll(newGlobals);
+
+ Rule[] rules = newPackage.getRules();
+
+ for (int i = 0; i < rules.length; ++i) {
+ addRule(rules[i]);
+ }
+ }
+
+ public void mergePackage(Package existingPackage, Package newPackage)
+ throws PackageIntegrationException {
+ Map globals = existingPackage.getGlobals();
+ List imports = existingPackage.getImports();
+
+ // First update the binary files
+ // @todo: this probably has issues if you add classes in the incorrect
+ // order - functions, rules, invokers.
+ PackageCompilationData compilationData = existingPackage
+ .getPackageCompilationData();
+ PackageCompilationData newCompilationData = newPackage
+ .getPackageCompilationData();
+ String[] files = newCompilationData.list();
+ for (int i = 0, length = files.length; i < length; i++) {
+ compilationData.write(files[i], newCompilationData.read(files[i]));
+ }
- this.rulesPackages.add( rulesPackage );
+ // Merge imports
+ imports.addAll(newPackage.getImports());
- Rule[] rules = rulesPackage.getRules();
+ // Add invokers
+ compilationData.putAllInvokers(newCompilationData.getInvokers());
- for ( int i = 0, length = rules.length; i < length; ++i ) {
- addRule( rules[i] );
+ // Add globals
+ for (Iterator it = globals.keySet().iterator(); it.hasNext();) {
+ String identifier = (String) it.next();
+ Class type = (Class) globals.get(identifier);
+ if (globals.containsKey(identifier)
+ && !globals.get(identifier).equals(type)) {
+ throw new PackageIntegrationException(
+ "Unable to merge new Package", newPackage);
+ }
}
}
@@ -209,23 +240,35 @@ public void addPackage(Package rulesPackage) throws PackageIntegrationException
* @throws InvalidPatternException
*/
public void addRule(Rule rule) throws FactException,
- InvalidPatternException {
- if ( !rule.isValid() ) {
- throw new IllegalArgumentException( "The rule called " + rule.getName() + " is not valid. Check for compile errors reported." );
+ InvalidPatternException {
+ if (!rule.isValid()) {
+ throw new IllegalArgumentException("The rule called "
+ + rule.getName()
+ + " is not valid. Check for compile errors reported.");
}
- List rules = Builder.processRule( rule );
+ List rules = Builder.processRule(rule);
- this.leapsRules.put( rule,
- rules );
+ this.leapsRules.put(rule, rules);
+
+ for (Iterator it = this.workingMemories.keySet().iterator(); it
+ .hasNext();) {
+ ((WorkingMemoryImpl) it.next()).addLeapsRules(rules);
+ }
- for ( Iterator it = this.workingMemories.keySet().iterator(); it.hasNext(); ) {
- ((WorkingMemoryImpl) it.next()).addLeapsRules( rules );
+ // Iterate each workingMemory and attempt to fire any rules, that were
+ // activated as a result of the new rule addition
+ for (Iterator it = this.workingMemories.keySet().iterator(); it
+ .hasNext();) {
+ WorkingMemoryImpl workingMemory = (WorkingMemoryImpl) it.next();
+ workingMemory.fireAllRules();
}
}
public void removeRule(Rule rule) {
- for ( Iterator it = this.workingMemories.keySet().iterator(); it.hasNext(); ) {
- ((WorkingMemoryImpl) it.next()).removeRule( (List) this.leapsRules.remove( rule ) );
+ for (Iterator it = this.workingMemories.keySet().iterator(); it
+ .hasNext();) {
+ ((WorkingMemoryImpl) it.next()).removeRule((List) this.leapsRules
+ .remove(rule));
}
}
diff --git a/drools-core/src/main/java/org/drools/leaps/Token.java b/drools-core/src/main/java/org/drools/leaps/Token.java
index 8463bc452ef..8ec2ad3e140 100644
--- a/drools-core/src/main/java/org/drools/leaps/Token.java
+++ b/drools-core/src/main/java/org/drools/leaps/Token.java
@@ -34,40 +34,43 @@
* @author Alexander Bagerman
*
*/
-class Token
- implements
- Tuple,
- Serializable {
+class Token implements Tuple, Serializable {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 1L;
- private WorkingMemoryImpl workingMemory;
+ private WorkingMemoryImpl workingMemory;
private final FactHandleImpl dominantFactHandle;
- private RuleHandle currentRuleHandle = null;
+ private RuleHandle currentRuleHandle = null;
- private FactHandleImpl[] currentFactHandles = new FactHandleImpl[0];
+ private FactHandleImpl[] currentFactHandles = new FactHandleImpl[0];
- boolean resume = false;
+ boolean resume = false;
- private Iterator rules = null;
+ private Iterator rules = null;
+
+ private final PropagationContextImpl propagationContext;
/**
- * agendaItem parts
+ *
*/
- public Token(WorkingMemoryImpl workingMemory,
- FactHandleImpl factHandle) {
+ public Token(WorkingMemoryImpl workingMemory, FactHandleImpl factHandle,
+ PropagationContextImpl propagationContext) {
this.workingMemory = workingMemory;
this.dominantFactHandle = factHandle;
+ this.propagationContext = propagationContext;
}
private Iterator rulesIterator() {
- if ( this.rules == null ) {
- if ( this.dominantFactHandle != null ) {
- this.rules = this.workingMemory.getFactTable( this.dominantFactHandle.getObject().getClass() ).getRulesIterator();
+ if (this.rules == null) {
+ if (this.dominantFactHandle != null) {
+ this.rules = this.workingMemory.getFactTable(
+ this.dominantFactHandle.getObject().getClass())
+ .getRulesIterator();
} else {
- this.rules = this.workingMemory.getNoRequiredColumnsLeapsRules();
+ this.rules = this.workingMemory
+ .getNoRequiredColumnsLeapsRules();
}
}
return this.rules;
@@ -75,7 +78,8 @@ private Iterator rulesIterator() {
public RuleHandle nextRuleHandle() {
this.currentRuleHandle = (RuleHandle) this.rules.next();
- this.currentFactHandles = new FactHandleImpl[this.currentRuleHandle.getLeapsRule().getNumberOfColumns()];
+ this.currentFactHandles = new FactHandleImpl[this.currentRuleHandle
+ .getLeapsRule().getNumberOfColumns()];
return this.currentRuleHandle;
}
@@ -87,19 +91,21 @@ public RuleHandle nextRuleHandle() {
public boolean hasNextRuleHandle() {
boolean ret = false;
- if ( this.rulesIterator() != null ) {
+ if (this.rulesIterator() != null) {
// starting with calling rulesIterator() to make sure that we picks
// rules because fact can be asserted before rules added
long levelId = this.workingMemory.getIdLastFireAllAt();
- if ( this.dominantFactHandle == null || this.dominantFactHandle.getId() >= levelId ) {
+ if (this.dominantFactHandle == null
+ || this.dominantFactHandle.getId() >= levelId) {
ret = this.rules.hasNext();
} else {
// then we need to skip rules that have id lower than
// workingMemory.idLastFireAllAt
boolean done = false;
- while ( !done ) {
- if ( this.rules.hasNext() ) {
- if ( ((RuleHandle) ((TableIterator) this.rules).peekNext()).getId() > levelId ) {
+ while (!done) {
+ if (this.rules.hasNext()) {
+ if (((RuleHandle) ((TableIterator) this.rules)
+ .peekNext()).getId() > levelId) {
ret = true;
done = true;
} else {
@@ -116,15 +122,14 @@ public boolean hasNextRuleHandle() {
}
public int hashCode() {
- if ( this.dominantFactHandle != null ) {
+ if (this.dominantFactHandle != null) {
return this.dominantFactHandle.hashCode();
} else {
return 0;
}
}
- public void set(int idx,
- FactHandleImpl factHandle) {
+ public void set(int idx, FactHandleImpl factHandle) {
this.currentFactHandles[idx] = factHandle;
}
@@ -150,11 +155,14 @@ public void setResume(boolean resume) {
* @see Object
*/
public boolean equals(Object that) {
- if ( this == that ) return true;
- if ( !(that instanceof Token) ) return false;
- if ( this.dominantFactHandle != null ) {
- if ( ((Token) that).dominantFactHandle != null ) {
- return this.dominantFactHandle.getId() == ((Token) that).dominantFactHandle.getId();
+ if (this == that)
+ return true;
+ if (!(that instanceof Token))
+ return false;
+ if (this.dominantFactHandle != null) {
+ if (((Token) that).dominantFactHandle != null) {
+ return this.dominantFactHandle.getId() == ((Token) that).dominantFactHandle
+ .getId();
} else {
return false;
}
@@ -178,7 +186,7 @@ public FactHandle get(int idx) {
* @see org.drools.spi.Tuple
*/
public FactHandle get(Declaration declaration) {
- return this.get( declaration.getColumn() );
+ return this.get(declaration.getColumn());
}
/**
@@ -202,10 +210,14 @@ public WorkingMemory getWorkingMemory() {
* @see java.lang.Object
*/
public String toString() {
- String ret = "TOKEN [" + this.dominantFactHandle + "]\n" + "\tRULE : " + this.currentRuleHandle + "\n";
- if ( this.currentFactHandles != null ) {
- for ( int i = 0, length = this.currentFactHandles.length; i < length; i++ ) {
- ret = ret + ((i == this.currentRuleHandle.getDominantPosition()) ? "***" : "") + "\t" + i + " -> " + this.currentFactHandles[i].getObject() + "\n";
+ String ret = "TOKEN [" + this.dominantFactHandle + "]\n" + "\tRULE : "
+ + this.currentRuleHandle + "\n";
+ if (this.currentFactHandles != null) {
+ for (int i = 0, length = this.currentFactHandles.length; i < length; i++) {
+ ret = ret
+ + ((i == this.currentRuleHandle.getDominantPosition()) ? "***"
+ : "") + "\t" + i + " -> "
+ + this.currentFactHandles[i].getObject() + "\n";
}
}
return ret;
@@ -216,10 +228,9 @@ public String toString() {
*
* @return LeapsTuple
*/
- LeapsTuple getTuple(PropagationContextImpl context) {
- return new LeapsTuple( this.currentFactHandles,
- this.currentRuleHandle.getLeapsRule(),
- context );
+ LeapsTuple getTuple() {
+ return new LeapsTuple(this.currentFactHandles, this.currentRuleHandle
+ .getLeapsRule(), this.propagationContext);
}
/**
@@ -232,8 +243,8 @@ LeapsTuple getTuple(PropagationContextImpl context) {
* object, otherwise <code>false</code>.
*/
public boolean dependsOn(FactHandle handle) {
- for ( int i = 0, length = this.currentFactHandles.length; i < length; i++ ) {
- if ( this.currentFactHandles[i].equals( handle ) ) {
+ for (int i = 0, length = this.currentFactHandles.length; i < length; i++) {
+ if (this.currentFactHandles[i].equals(handle)) {
return true;
}
}
@@ -250,4 +261,8 @@ public boolean dependsOn(FactHandle handle) {
public void setActivation(Activation activation) {
// do nothing
}
+
+ public PropagationContextImpl getPropagationContext() {
+ return propagationContext;
+ }
}
diff --git a/drools-core/src/main/java/org/drools/leaps/TokenEvaluator.java b/drools-core/src/main/java/org/drools/leaps/TokenEvaluator.java
index acc7bcd1c2e..8b21b712ee7 100644
--- a/drools-core/src/main/java/org/drools/leaps/TokenEvaluator.java
+++ b/drools-core/src/main/java/org/drools/leaps/TokenEvaluator.java
@@ -16,13 +16,10 @@
* limitations under the License.
*/
-import org.drools.common.PropagationContextImpl;
import org.drools.leaps.util.Table;
import org.drools.leaps.util.TableIterator;
import org.drools.rule.EvalCondition;
import org.drools.rule.InvalidRuleException;
-import org.drools.spi.Activation;
-import org.drools.spi.PropagationContext;
/**
* helper class that does condition evaluation on token when working memory does
@@ -41,32 +38,51 @@ final class TokenEvaluator {
* @throws Exception
* @throws InvalidRuleException
*/
- final static protected void evaluate(Token token) throws NoMatchesFoundException,
- Exception,
- InvalidRuleException {
- WorkingMemoryImpl workingMemory = (WorkingMemoryImpl) token.getWorkingMemory();
+ final static protected void evaluate(Token token)
+ throws NoMatchesFoundException, InvalidRuleException {
+ WorkingMemoryImpl workingMemory = (WorkingMemoryImpl) token
+ .getWorkingMemory();
LeapsRule leapsRule = token.getCurrentRuleHandle().getLeapsRule();
// sometimes there is no normal conditions, only not and exists
int numberOfColumns = leapsRule.getNumberOfColumns();
- if ( numberOfColumns > 0 ) {
- int dominantFactPosition = token.getCurrentRuleHandle().getDominantPosition();
- if ( leapsRule.getColumnConstraintsAtPosition( dominantFactPosition ).isAllowedAlpha( token.getDominantFactHandle(),
- token,
- workingMemory ) ) {
+ if (numberOfColumns > 0) {
+ int dominantFactPosition = token.getCurrentRuleHandle()
+ .getDominantPosition();
+ if (leapsRule.getColumnConstraintsAtPosition(dominantFactPosition)
+ .isAllowedAlpha(token.getDominantFactHandle(), token,
+ workingMemory)) {
TableIterator[] iterators = new TableIterator[numberOfColumns];
// getting iterators first
- for ( int i = 0; i < numberOfColumns; i++ ) {
- if ( i == dominantFactPosition ) {
- iterators[i] = Table.singleItemIterator( token.getDominantFactHandle() );
+ for (int i = 0; i < numberOfColumns; i++) {
+ if (i == dominantFactPosition) {
+ iterators[i] = Table.singleItemIterator(token
+ .getDominantFactHandle());
} else {
- if ( i > 0 && leapsRule.getColumnConstraintsAtPosition( i ).isAlphaPresent() ) {
- iterators[i] = workingMemory.getFactTable( leapsRule.getColumnClassObjectTypeAtPosition( i ) ).tailConstrainedIterator( workingMemory,
- leapsRule.getColumnConstraintsAtPosition( i ),
- token.getDominantFactHandle(),
- (token.isResume() ? token.get( i ) : token.getDominantFactHandle()) );
+ if (i > 0
+ && leapsRule.getColumnConstraintsAtPosition(i)
+ .isAlphaPresent()) {
+ iterators[i] = workingMemory
+ .getFactTable(
+ leapsRule
+ .getColumnClassObjectTypeAtPosition(i))
+ .tailConstrainedIterator(
+ workingMemory,
+ leapsRule
+ .getColumnConstraintsAtPosition(i),
+ token.getDominantFactHandle(),
+ (token.isResume() ? token.get(i)
+ : token
+ .getDominantFactHandle()));
} else {
- iterators[i] = workingMemory.getFactTable( leapsRule.getColumnClassObjectTypeAtPosition( i ) ).tailIterator( token.getDominantFactHandle(),
- (token.isResume() ? token.get( i ) : token.getDominantFactHandle()) );
+ iterators[i] = workingMemory
+ .getFactTable(
+ leapsRule
+ .getColumnClassObjectTypeAtPosition(i))
+ .tailIterator(
+ token.getDominantFactHandle(),
+ (token.isResume() ? token.get(i)
+ : token
+ .getDominantFactHandle()));
}
}
}
@@ -78,13 +94,16 @@ final static protected void evaluate(Token token) throws NoMatchesFoundException
boolean doReset = false;
boolean skip = token.isResume();
TableIterator currentIterator;
- for ( int i = 0; i < numberOfColumns && !someIteratorsEmpty; i++ ) {
+ for (int i = 0; i < numberOfColumns && !someIteratorsEmpty; i++) {
currentIterator = iterators[i];
- if ( currentIterator.isEmpty() ) {
+ if (currentIterator.isEmpty()) {
someIteratorsEmpty = true;
} else {
- if ( !doReset ) {
- if ( skip && currentIterator.hasNext() && !currentIterator.peekNext().equals( token.get( i ) ) ) {
+ if (!doReset) {
+ if (skip
+ && currentIterator.hasNext()
+ && !currentIterator.peekNext().equals(
+ token.get(i))) {
skip = false;
doReset = true;
}
@@ -95,7 +114,7 @@ final static protected void evaluate(Token token) throws NoMatchesFoundException
}
// check if one of them is empty and immediate return
- if ( someIteratorsEmpty ) {
+ if (someIteratorsEmpty) {
throw new NoMatchesFoundException();
// "some of tables do not have facts");
}
@@ -103,49 +122,48 @@ final static protected void evaluate(Token token) throws NoMatchesFoundException
// column position in the nested loop
int jj = 0;
boolean done = false;
- while ( !done ) {
+ while (!done) {
currentIterator = iterators[jj];
- if ( !currentIterator.hasNext() ) {
- if ( jj == 0 ) {
+ if (!currentIterator.hasNext()) {
+ if (jj == 0) {
done = true;
} else {
//
currentIterator.reset();
- token.set( jj,
- (FactHandleImpl) null );
+ token.set(jj, (FactHandleImpl) null);
jj = jj - 1;
- if ( skip ) {
+ if (skip) {
skip = false;
}
}
} else {
currentIterator.next();
- token.set( jj,
- (FactHandleImpl) iterators[jj].current() );
+ token.set(jj, (FactHandleImpl) iterators[jj].current());
// check if match found
// we need to check only beta for dominant fact
// alpha was already checked
boolean localMatch = false;
- if ( jj == 0 && jj != dominantFactPosition ) {
- localMatch = leapsRule.getColumnConstraintsAtPosition( jj ).isAllowed( token.get( jj ),
- token,
- workingMemory );
+ if (jj == 0 && jj != dominantFactPosition) {
+ localMatch = leapsRule
+ .getColumnConstraintsAtPosition(jj)
+ .isAllowed(token.get(jj), token,
+ workingMemory);
} else {
- localMatch = leapsRule.getColumnConstraintsAtPosition( jj ).isAllowedBeta( token.get( jj ),
- token,
- workingMemory );
+ localMatch = leapsRule
+ .getColumnConstraintsAtPosition(jj)
+ .isAllowedBeta(token.get(jj), token,
+ workingMemory);
}
- if ( localMatch ) {
+ if (localMatch) {
// start iteratating next iterator
// or for the last one check negative conditions and
// fire
// consequence
- if ( jj == (numberOfColumns - 1) ) {
- if ( !skip ) {
- if ( processAfterAllPositiveConstraintOk( token,
- leapsRule,
- workingMemory ) ) {
+ if (jj == (numberOfColumns - 1)) {
+ if (!skip) {
+ if (processAfterAllPositiveConstraintOk(
+ token, leapsRule, workingMemory)) {
return;
}
} else {
@@ -155,7 +173,7 @@ final static protected void evaluate(Token token) throws NoMatchesFoundException
jj = jj + 1;
}
} else {
- if ( skip ) {
+ if (skip) {
skip = false;
}
}
@@ -163,9 +181,8 @@ final static protected void evaluate(Token token) throws NoMatchesFoundException
}
}
} else {
- if ( processAfterAllPositiveConstraintOk( token,
- leapsRule,
- workingMemory ) ) {
+ if (processAfterAllPositiveConstraintOk(token, leapsRule,
+ workingMemory)) {
return;
}
}
@@ -174,8 +191,8 @@ final static protected void evaluate(Token token) throws NoMatchesFoundException
}
/**
- * Makes final check on eval, exists and not conditions after all column values
- * isAllowed by column constraints
+     * Makes the final check on eval, exists and not conditions after all
+     * column values are allowed by the column constraints
*
* @param token
* @param leapsRule
@@ -184,39 +201,32 @@ final static protected void evaluate(Token token) throws NoMatchesFoundException
* @throws Exception
*/
final static boolean processAfterAllPositiveConstraintOk(Token token,
- LeapsRule leapsRule,
- WorkingMemoryImpl workingMemory) throws Exception {
- LeapsTuple tuple = token.getTuple( new PropagationContextImpl( workingMemory.increamentPropagationIdCounter(),
- PropagationContext.ASSERTION,
- leapsRule.getRule(),
- (Activation) null ) );
- if ( leapsRule.containsEvalConditions() ) {
- if ( !TokenEvaluator.evaluateEvalConditions( leapsRule,
- tuple,
- workingMemory ) ) {
+ LeapsRule leapsRule, WorkingMemoryImpl workingMemory) {
+ LeapsTuple tuple = token.getTuple();
+ if (leapsRule.containsEvalConditions()) {
+ if (!TokenEvaluator.evaluateEvalConditions(leapsRule, tuple,
+ workingMemory)) {
return false;
}
}
- if ( leapsRule.containsExistsColumns() ) {
- TokenEvaluator.evaluateExistsConditions( tuple,
- leapsRule,
- workingMemory );
+ if (leapsRule.containsExistsColumns()) {
+ TokenEvaluator.evaluateExistsConditions(tuple, leapsRule,
+ workingMemory);
}
- if ( leapsRule.containsNotColumns() ) {
- TokenEvaluator.evaluateNotConditions( tuple,
- leapsRule,
- workingMemory );
+ if (leapsRule.containsNotColumns()) {
+ TokenEvaluator.evaluateNotConditions(tuple, leapsRule,
+ workingMemory);
}
//
Class[] classes = leapsRule.getExistsNotColumnsClasses();
- for ( int i = 0, length = classes.length; i < length; i++ ) {
- workingMemory.getFactTable( classes[i] ).addTuple( tuple );
+ for (int i = 0, length = classes.length; i < length; i++) {
+ workingMemory.getFactTable(classes[i]).addTuple(tuple);
}
//
- if ( tuple.isReadyForActivation() ) {
+ if (tuple.isReadyForActivation()) {
// let agenda to do its work
- workingMemory.assertTuple( tuple );
+ workingMemory.assertTuple(tuple);
return true;
} else {
return false;
@@ -233,12 +243,10 @@ final static boolean processAfterAllPositiveConstraintOk(Token token,
* @throws Exception
*/
final static boolean evaluateEvalConditions(LeapsRule leapsRule,
- LeapsTuple tuple,
- WorkingMemoryImpl workingMemory) throws Exception {
+ LeapsTuple tuple, WorkingMemoryImpl workingMemory) {
EvalCondition[] evals = leapsRule.getEvalConditions();
- for ( int i = 0; i < evals.length; i++ ) {
- if ( !evals[i].isAllowed( tuple,
- workingMemory ) ) {
+ for (int i = 0; i < evals.length; i++) {
+ if (!evals[i].isAllowed(tuple, workingMemory)) {
return false;
}
}
@@ -254,28 +262,24 @@ final static boolean evaluateEvalConditions(LeapsRule leapsRule,
* @return success
* @throws Exception
*/
- final static void evaluateNotConditions(LeapsTuple tuple,
- LeapsRule rule,
- WorkingMemoryImpl workingMemory) throws Exception {
+ final static void evaluateNotConditions(LeapsTuple tuple, LeapsRule rule,
+ WorkingMemoryImpl workingMemory) {
FactHandleImpl factHandle;
TableIterator tableIterator;
ColumnConstraints constraint;
ColumnConstraints[] not = rule.getNotColumnConstraints();
- for ( int i = 0, length = not.length; i < length; i++ ) {
+ for (int i = 0, length = not.length; i < length; i++) {
constraint = not[i];
// scan the whole table
- tableIterator = workingMemory.getFactTable( constraint.getClassType() ).iterator();
+ tableIterator = workingMemory.getFactTable(
+ constraint.getClassType()).iterator();
// fails if exists
- while ( tableIterator.hasNext() ) {
+ while (tableIterator.hasNext()) {
factHandle = (FactHandleImpl) tableIterator.next();
// check constraint condition
- if ( constraint.isAllowed( factHandle,
- tuple,
- workingMemory ) ) {
- tuple.addNotFactHandle( factHandle,
- i );
- factHandle.addNotTuple( tuple,
- i );
+ if (constraint.isAllowed(factHandle, tuple, workingMemory)) {
+ tuple.addNotFactHandle(factHandle, i);
+ factHandle.addNotTuple(tuple, i);
}
}
}
@@ -289,27 +293,23 @@ final static void evaluateNotConditions(LeapsTuple tuple,
* @throws Exception
*/
final static void evaluateExistsConditions(LeapsTuple tuple,
- LeapsRule rule,
- WorkingMemoryImpl workingMemory) throws Exception {
+ LeapsRule rule, WorkingMemoryImpl workingMemory) {
FactHandleImpl factHandle;
TableIterator tableIterator;
ColumnConstraints constraint;
ColumnConstraints[] exists = rule.getExistsColumnConstraints();
- for ( int i = 0, length = exists.length; i < length; i++ ) {
+ for (int i = 0, length = exists.length; i < length; i++) {
constraint = exists[i];
// scan the whole table
- tableIterator = workingMemory.getFactTable( constraint.getClassType() ).iterator();
+ tableIterator = workingMemory.getFactTable(
+ constraint.getClassType()).iterator();
// fails if exists
- while ( tableIterator.hasNext() ) {
+ while (tableIterator.hasNext()) {
factHandle = (FactHandleImpl) tableIterator.next();
// check constraint conditions
- if ( constraint.isAllowed( factHandle,
- tuple,
- workingMemory ) ) {
- tuple.addExistsFactHandle( factHandle,
- i );
- factHandle.addExistsTuple( tuple,
- i );
+ if (constraint.isAllowed(factHandle, tuple, workingMemory)) {
+ tuple.addExistsFactHandle(factHandle, i);
+ factHandle.addExistsTuple(tuple, i);
}
}
}
diff --git a/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java b/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java
index 45916837a0e..b6beacc973d 100644
--- a/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java
+++ b/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java
@@ -62,20 +62,19 @@
* @see java.io.Serializable
*
*/
-class WorkingMemoryImpl extends AbstractWorkingMemory
- implements
- EventSupport,
- PropertyChangeListener {
- private static final long serialVersionUID = -2524904474925421759L;
+class WorkingMemoryImpl extends AbstractWorkingMemory implements EventSupport,
+ PropertyChangeListener {
+ private static final long serialVersionUID = -2524904474925421759L;
- protected final Agenda agenda;
+ protected final Agenda agenda;
- private final Map queryResults;
+ private final Map queryResults;
// rules consisting only of not and exists
- private final RuleTable noRequiredColumnsLeapsRules = new RuleTable( DefaultConflictResolver.getInstance().getRuleConflictResolver() );
+ private final RuleTable noRequiredColumnsLeapsRules = new RuleTable(
+ DefaultConflictResolver.getInstance().getRuleConflictResolver());
- private final IdentityMap leapsRulesToHandlesMap = new IdentityMap();
+ private final IdentityMap leapsRulesToHandlesMap = new IdentityMap();
/**
* Construct.
@@ -84,13 +83,17 @@ class WorkingMemoryImpl extends AbstractWorkingMemory
* The backing rule-base.
*/
public WorkingMemoryImpl(RuleBaseImpl ruleBase) {
- super( ruleBase,
- ruleBase.newFactHandleFactory() );
- this.agenda = new LeapsAgenda( this );
+ super(ruleBase, ruleBase.newFactHandleFactory());
+ this.agenda = new LeapsAgenda(this);
+ this.agenda.setFocus(AgendaGroup.MAIN);
+ //
this.queryResults = new HashMap();
// to pick up rules that do not require columns, only not and exists
- this.pushTokenOnStack( new Token( this,
- null ) );
+ PropagationContextImpl context = new PropagationContextImpl(
+ nextPropagationIdCounter(), PropagationContext.ASSERTION, null,
+ null);
+
+ this.pushTokenOnStack(new Token(this, null, context));
}
/**
@@ -99,25 +102,26 @@ public WorkingMemoryImpl(RuleBaseImpl ruleBase) {
* @return The new fact handle.
*/
FactHandle newFactHandle(Object object) {
- return ((HandleFactory) this.handleFactory).newFactHandle( object );
+ return ((HandleFactory) this.handleFactory).newFactHandle(object);
}
/**
* @see WorkingMemory
*/
- public void setGlobal(String name,
- Object value) {
+ public void setGlobal(String name, Object value) {
// Make sure the application data has been declared in the RuleBase
- Map applicationDataDefintions = ((RuleBaseImpl) this.ruleBase).getApplicationData();
- Class type = (Class) applicationDataDefintions.get( name );
- if ( (type == null) ) {
- throw new RuntimeException( "Unexpected application data [" + name + "]" );
- } else if ( !type.isInstance( value ) ) {
- throw new RuntimeException( "Illegal class for application data. " + "Expected [" + type.getName() + "], " + "found [" + value.getClass().getName() + "]." );
+ Map applicationDataDefintions = ((RuleBaseImpl) this.ruleBase)
+ .getGlobalDeclarations();
+ Class type = (Class) applicationDataDefintions.get(name);
+ if ((type == null)) {
+ throw new RuntimeException("Unexpected global [" + name + "]");
+ } else if (!type.isInstance(value)) {
+ throw new RuntimeException("Illegal class for global. "
+ + "Expected [" + type.getName() + "], " + "found ["
+ + value.getClass().getName() + "].");
} else {
- this.applicationData.put( name,
- value );
+ this.getGlobals().put(name, value);
}
}
@@ -143,8 +147,9 @@ public Object getObject(FactHandle handle) {
public List getObjects(Class objectClass) {
List list = new LinkedList();
- for ( Iterator it = this.getFactTable( objectClass ).iterator(); it.hasNext(); ) {
- list.add( it.next() );
+ for (Iterator it = this.getFactTable(objectClass).iterator(); it
+ .hasNext();) {
+ list.add(it.next());
}
return list;
@@ -162,40 +167,26 @@ public boolean containsObject(FactHandle handle) {
* @see WorkingMemory
*/
public FactHandle assertObject(Object object) throws FactException {
- return assertObject( object, /* Not-Dynamic */
- false,
- false,
- null,
- null );
+ return assertObject(object, /* Not-Dynamic */
+ false, false, null, null);
}
/**
* @see WorkingMemory
*/
public FactHandle assertLogicalObject(Object object) throws FactException {
- return assertObject( object, /* Not-Dynamic */
- false,
- true,
- null,
- null );
+ return assertObject(object, /* Not-Dynamic */
+ false, true, null, null);
}
- public FactHandle assertObject(Object object,
- boolean dynamic) throws FactException {
- return assertObject( object,
- dynamic,
- false,
- null,
- null );
+ public FactHandle assertObject(Object object, boolean dynamic)
+ throws FactException {
+ return assertObject(object, dynamic, false, null, null);
}
- public FactHandle assertLogicalObject(Object object,
- boolean dynamic) throws FactException {
- return assertObject( object,
- dynamic,
- true,
- null,
- null );
+ public FactHandle assertLogicalObject(Object object, boolean dynamic)
+ throws FactException {
+ return assertObject(object, dynamic, true, null, null);
}
/**
@@ -210,69 +201,64 @@ public FactHandle assertLogicalObject(Object object,
*
* @see WorkingMemory
*/
- public FactHandle assertObject(Object object,
- boolean dynamic,
- boolean logical,
- Rule rule,
- Activation activation) throws FactException {
+ public FactHandle assertObject(Object object, boolean dynamic,
+ boolean logical, Rule rule, Activation activation)
+ throws FactException {
// check if the object already exists in the WM
- FactHandleImpl handle = (FactHandleImpl) this.identityMap.get( object );
+ FactHandleImpl handle = (FactHandleImpl) this.identityMap.get(object);
// return if the handle exists and this is a logical assertion
- if ( (handle != null) && (logical) ) {
+ if ((handle != null) && (logical)) {
return handle;
}
// lets see if the object is already logical asserted
- Object logicalState = this.equalsMap.get( object );
+ Object logicalState = this.equalsMap.get(object);
// if we have a handle and this STATED fact was previously STATED
- if ( (handle != null) && (!logical) && logicalState == AbstractWorkingMemory.STATED ) {
+ if ((handle != null) && (!logical)
+ && logicalState == AbstractWorkingMemory.STATED) {
return handle;
}
- if ( !logical ) {
+ if (!logical) {
// If this stated assertion already has justifications then we need
// to cancel them
- if ( logicalState instanceof FactHandleImpl ) {
+ if (logicalState instanceof FactHandleImpl) {
handle = (FactHandleImpl) logicalState;
handle.removeAllLogicalDependencies();
} else {
- handle = (FactHandleImpl) newFactHandle( object );
+ handle = (FactHandleImpl) newFactHandle(object);
}
- putObject( handle,
- object );
+ putObject(handle, object);
- this.equalsMap.put( object,
- AbstractWorkingMemory.STATED );
+ this.equalsMap.put(object, AbstractWorkingMemory.STATED);
- if ( dynamic ) {
- addPropertyChangeListener( object );
+ if (dynamic) {
+ addPropertyChangeListener(object);
}
} else {
// This object is already STATED, we cannot make it justifieable
- if ( logicalState == AbstractWorkingMemory.STATED ) {
+ if (logicalState == AbstractWorkingMemory.STATED) {
return null;
}
handle = (FactHandleImpl) logicalState;
// we create a lookup handle for the first asserted equals object
// all future equals objects will use that handle
- if ( handle == null ) {
- handle = (FactHandleImpl) newFactHandle( object );
+ if (handle == null) {
+ handle = (FactHandleImpl) newFactHandle(object);
- putObject( handle,
- object );
+ putObject(handle, object);
- this.equalsMap.put( object,
- handle );
+ this.equalsMap.put(object, handle);
}
// adding logical dependency
LeapsTuple tuple = (LeapsTuple) activation.getTuple();
- tuple.addLogicalDependency( handle );
- handle.addLogicalDependency( tuple );
+ tuple.addLogicalDependency(handle);
+ handle.addLogicalDependency(tuple);
}
// leaps handle already has object attached
@@ -288,66 +274,64 @@ public FactHandle assertObject(Object object,
LeapsTuple tuple;
LeapsRule leapsRule;
Class objectClass = object.getClass();
- for ( Iterator tables = this.getFactTablesList( objectClass ).iterator(); tables.hasNext(); ) {
+ for (Iterator tables = this.getFactTablesList(objectClass).iterator(); tables
+ .hasNext();) {
FactTable factTable = (FactTable) tables.next();
// adding fact to container
- factTable.add( handle );
- // inspect all tuples for exists and not conditions and activate / deactivate
+ factTable.add(handle);
+ // inspect all tuples for exists and not conditions and activate /
+ // deactivate
// agenda items
ColumnConstraints constraint;
ColumnConstraints[] constraints;
- for ( Iterator tuples = factTable.getTuplesIterator(); tuples.hasNext(); ) {
+ for (Iterator tuples = factTable.getTuplesIterator(); tuples
+ .hasNext();) {
tuple = (LeapsTuple) tuples.next();
leapsRule = tuple.getLeapsRule();
// check not constraints
constraints = leapsRule.getNotColumnConstraints();
- for ( int i = 0, length = constraints.length; i < length; i++ ) {
+ for (int i = 0, length = constraints.length; i < length; i++) {
constraint = constraints[i];
- if ( objectClass.isAssignableFrom( constraint.getClassType() ) && constraint.isAllowed( handle,
- tuple,
- this ) ) {
- tuple.addNotFactHandle( handle,
- i );
- handle.addNotTuple( tuple,
- i );
+ if (objectClass.isAssignableFrom(constraint.getClassType())
+ && constraint.isAllowed(handle, tuple, this)) {
+ tuple.addNotFactHandle(handle, i);
+ handle.addNotTuple(tuple, i);
}
}
// check exists constraints
constraints = leapsRule.getExistsColumnConstraints();
- for ( int i = 0, length = constraints.length; i < length; i++ ) {
+ for (int i = 0, length = constraints.length; i < length; i++) {
constraint = constraints[i];
- if ( objectClass.isAssignableFrom( constraint.getClassType() ) && constraint.isAllowed( handle,
- tuple,
- this ) ) {
- tuple.addExistsFactHandle( handle,
- i );
- handle.addExistsTuple( tuple,
- i );
+ if (objectClass.isAssignableFrom(constraint.getClassType())
+ && constraint.isAllowed(handle, tuple, this)) {
+ tuple.addExistsFactHandle(handle, i);
+ handle.addExistsTuple(tuple, i);
}
}
// check and see if we need deactivate / activate
- if ( tuple.isReadyForActivation() && tuple.isActivationNull() ) {
+ if (tuple.isReadyForActivation() && tuple.isActivationNull()) {
// ready to activate
- this.assertTuple( tuple );
+ this.assertTuple(tuple);
// remove tuple from fact table
tuples.remove();
- } else if ( !tuple.isReadyForActivation() && !tuple.isActivationNull() ) {
+ } else if (!tuple.isReadyForActivation()
+ && !tuple.isActivationNull()) {
// time to pull from agenda
- this.invalidateActivation( tuple );
+ this.invalidateActivation(tuple);
}
}
}
// new leaps stack token
- this.pushTokenOnStack( new Token( this,
- handle ) );
-
- this.workingMemoryEventSupport.fireObjectAsserted( new PropagationContextImpl( ++this.propagationIdCounter,
- PropagationContext.ASSERTION,
- rule,
- activation ),
- handle,
- object );
+ PropagationContextImpl context = new PropagationContextImpl(
+ nextPropagationIdCounter(), PropagationContext.ASSERTION, rule,
+ activation);
+
+ this.pushTokenOnStack(new Token(this, handle, context));
+
+ this.workingMemoryEventSupport.fireObjectAsserted(context, handle,
+ object);
+
return handle;
}
@@ -359,66 +343,63 @@ public FactHandle assertObject(Object object,
* @param object
* The object.
*/
- Object putObject(FactHandle handle,
- Object object) {
+ Object putObject(FactHandle handle, Object object) {
- this.identityMap.put( object,
- handle );
+ this.identityMap.put(object, handle);
- return this.objects.put( ((FactHandleImpl) handle).getId(),
- object );
+ return this.objects.put(((FactHandleImpl) handle).getId(), object);
}
Object removeObject(FactHandle handle) {
- this.identityMap.remove( ((FactHandleImpl) handle).getObject() );
+ this.identityMap.remove(((FactHandleImpl) handle).getObject());
- return this.objects.remove( ((FactHandleImpl) handle).getId() );
+ return this.objects.remove(((FactHandleImpl) handle).getId());
}
/**
* @see WorkingMemory
*/
- public void retractObject(FactHandle handle,
- boolean removeLogical,
- boolean updateEqualsMap,
- Rule rule,
- Activation activation) throws FactException {
+ public void retractObject(FactHandle handle, boolean removeLogical,
+ boolean updateEqualsMap, Rule rule, Activation activation)
+ throws FactException {
//
- removePropertyChangeListener( handle );
+ removePropertyChangeListener(handle);
/*
* leaps specific actions
*/
// remove fact from all relevant fact tables container
- for ( Iterator it = this.getFactTablesList( ((FactHandleImpl) handle).getObject().getClass() ).iterator(); it.hasNext(); ) {
- ((FactTable) it.next()).remove( handle );
+ for (Iterator it = this.getFactTablesList(
+ ((FactHandleImpl) handle).getObject().getClass()).iterator(); it
+ .hasNext();) {
+ ((FactTable) it.next()).remove(handle);
}
// 0. remove activated tuples
Iterator tuples = ((FactHandleImpl) handle).getActivatedTuples();
- for ( ; tuples != null && tuples.hasNext(); ) {
- this.invalidateActivation( (LeapsTuple) tuples.next() );
+ for (; tuples != null && tuples.hasNext();) {
+ this.invalidateActivation((LeapsTuple) tuples.next());
}
// 1. remove fact for nots and exists tuples
FactHandleTupleAssembly assembly;
Iterator it;
it = ((FactHandleImpl) handle).getNotTuples();
- if ( it != null ) {
- for ( ; it.hasNext(); ) {
+ if (it != null) {
+ for (; it.hasNext();) {
assembly = (FactHandleTupleAssembly) it.next();
- assembly.getTuple().removeNotFactHandle( handle,
- assembly.getIndex() );
+ assembly.getTuple().removeNotFactHandle(handle,
+ assembly.getIndex());
}
}
it = ((FactHandleImpl) handle).getExistsTuples();
- if ( it != null ) {
- for ( ; it.hasNext(); ) {
+ if (it != null) {
+ for (; it.hasNext();) {
assembly = (FactHandleTupleAssembly) it.next();
- assembly.getTuple().removeExistsFactHandle( handle,
- assembly.getIndex() );
+ assembly.getTuple().removeExistsFactHandle(handle,
+ assembly.getIndex());
}
}
// 2. assert all tuples that are ready for activation or cancel ones
@@ -426,76 +407,73 @@ public void retractObject(FactHandle handle,
LeapsTuple tuple;
IteratorChain chain = new IteratorChain();
it = ((FactHandleImpl) handle).getNotTuples();
- if ( it != null ) {
- chain.addIterator( it );
+ if (it != null) {
+ chain.addIterator(it);
}
it = ((FactHandleImpl) handle).getExistsTuples();
- if ( it != null ) {
- chain.addIterator( it );
+ if (it != null) {
+ chain.addIterator(it);
}
- for ( ; chain.hasNext(); ) {
+ for (; chain.hasNext();) {
tuple = ((FactHandleTupleAssembly) chain.next()).getTuple();
- if ( tuple.isReadyForActivation() && tuple.isActivationNull() ) {
+ if (tuple.isReadyForActivation() && tuple.isActivationNull()) {
// ready to activate
- this.assertTuple( tuple );
+ this.assertTuple(tuple);
} else {
// time to pull from agenda
- this.invalidateActivation( tuple );
+ this.invalidateActivation(tuple);
}
}
// remove it from stack
- this.stack.remove( new Token( this,
- (FactHandleImpl) handle ) );
+ this.stack.remove(new Token(this, (FactHandleImpl) handle, null));
//
// end leaps specific actions
//
- Object oldObject = removeObject( handle );
+ Object oldObject = removeObject(handle);
/* check to see if this was a logical asserted object */
- if ( removeLogical ) {
- this.equalsMap.remove( oldObject );
+ if (removeLogical) {
+ this.equalsMap.remove(oldObject);
}
- if ( updateEqualsMap ) {
- this.equalsMap.remove( oldObject );
+ if (updateEqualsMap) {
+ this.equalsMap.remove(oldObject);
}
// not applicable to leaps implementation
// this.factHandlePool.push( ((FactHandleImpl) handle).getId() );
- PropagationContextImpl context = new PropagationContextImpl( ++this.propagationIdCounter,
- PropagationContext.RETRACTION,
- rule,
- activation );
-
- this.workingMemoryEventSupport.fireObjectRetracted( context,
- handle,
- oldObject );
+ PropagationContextImpl context = new PropagationContextImpl(
+ nextPropagationIdCounter(), PropagationContext.RETRACTION,
+ rule, activation);
+
+ this.workingMemoryEventSupport.fireObjectRetracted(context, handle,
+ oldObject);
// not applicable to leaps fact handle
// ((FactHandleImpl) handle).invalidate();
}
private void invalidateActivation(LeapsTuple tuple) {
- if ( !tuple.isReadyForActivation() && !tuple.isActivationNull() ) {
+ if (!tuple.isReadyForActivation() && !tuple.isActivationNull()) {
Activation activation = tuple.getActivation();
// invalidate agenda agendaItem
- if ( activation.isActivated() ) {
+ if (activation.isActivated()) {
activation.remove();
- getAgendaEventSupport().fireActivationCancelled( activation );
+ getAgendaEventSupport().fireActivationCancelled(activation);
}
//
- tuple.setActivation( null );
+ tuple.setActivation(null);
}
// remove logical dependency
FactHandleImpl factHandle;
Iterator it = tuple.getLogicalDependencies();
- if ( it != null ) {
- for ( ; it.hasNext(); ) {
+ if (it != null) {
+ for (; it.hasNext();) {
factHandle = (FactHandleImpl) it.next();
- factHandle.removeLogicalDependency( tuple );
- if ( !factHandle.isLogicalyValid() ) {
- this.retractObject( factHandle );
+ factHandle.removeLogicalDependency(tuple);
+ if (!factHandle.isLogicalyValid()) {
+ this.retractObject(factHandle);
}
}
}
@@ -504,45 +482,38 @@ private void invalidateActivation(LeapsTuple tuple) {
/**
* @see WorkingMemory
*/
- public void modifyObject(FactHandle handle,
- Object object,
- Rule rule,
- Activation activation) throws FactException {
+ public void modifyObject(FactHandle handle, Object object, Rule rule,
+ Activation activation) throws FactException {
- this.retractObject( handle );
+ this.retractObject(handle);
- this.assertObject( object );
+ this.assertObject(object, false, false, rule, activation);
/*
* this.ruleBase.modifyObject( handle, object, this );
*/
- this.workingMemoryEventSupport.fireObjectModified( new PropagationContextImpl( ++this.propagationIdCounter,
- PropagationContext.MODIFICATION,
- rule,
- activation ),
- handle,
- ((FactHandleImpl) handle).getObject(),
- object );
+ this.workingMemoryEventSupport.fireObjectModified(
+ new PropagationContextImpl(nextPropagationIdCounter(),
+ PropagationContext.MODIFICATION, rule, activation),
+ handle, ((FactHandleImpl) handle).getObject(), object);
}
/**
- * leaps section
+ * ************* leaps section *********************
*/
+ private final Object lock = new Object();
- private final Object lock = new Object();
-
- // private long idsSequence;
-
- private long idLastFireAllAt = -1;
+ private long idLastFireAllAt = -1;
/**
- * algorithm stack. TreeSet is used to facilitate dynamic rule add/remove
+ * algorithm stack.
*/
+ private Stack stack = new Stack();
- private Stack stack = new Stack();
-
- // to store facts to cursor over it
- private final Hashtable factTables = new Hashtable();
+ /**
+ * to store facts to cursor over it
+ */
+ private final Hashtable factTables = new Hashtable();
/**
* generates or just return List of internal factTables that correspond a
@@ -553,9 +524,9 @@ public void modifyObject(FactHandle handle,
protected List getFactTablesList(Class c) {
ArrayList list = new ArrayList();
Class bufClass = c;
- while ( bufClass != null ) {
+ while (bufClass != null) {
//
- list.add( this.getFactTable( bufClass ) );
+ list.add(this.getFactTable(bufClass));
// and get the next class on the list
bufClass = bufClass.getSuperclass();
}
@@ -568,7 +539,7 @@ protected List getFactTablesList(Class c) {
* @param token
*/
protected void pushTokenOnStack(Token token) {
- this.stack.push( token );
+ this.stack.push(token);
}
/**
@@ -580,12 +551,11 @@ protected void pushTokenOnStack(Token token) {
*/
protected FactTable getFactTable(Class c) {
FactTable table;
- if ( this.factTables.containsKey( c ) ) {
- table = (FactTable) this.factTables.get( c );
+ if (this.factTables.containsKey(c)) {
+ table = (FactTable) this.factTables.get(c);
} else {
- table = new FactTable( DefaultConflictResolver.getInstance() );
- this.factTables.put( c,
- table );
+ table = new FactTable(DefaultConflictResolver.getInstance());
+ this.factTables.put(c, table);
}
return table;
@@ -597,59 +567,62 @@ protected FactTable getFactTable(Class c) {
* @param rules
*/
protected void addLeapsRules(List rules) {
- synchronized ( this.lock ) {
+ synchronized (this.lock) {
ArrayList ruleHandlesList;
LeapsRule rule;
RuleHandle ruleHandle;
- for ( Iterator it = rules.iterator(); it.hasNext(); ) {
+ for (Iterator it = rules.iterator(); it.hasNext();) {
rule = (LeapsRule) it.next();
// some times rules do not have "normal" constraints and only
// not and exists
- if ( rule.getNumberOfColumns() > 0 ) {
+ if (rule.getNumberOfColumns() > 0) {
ruleHandlesList = new ArrayList();
- for ( int i = 0; i < rule.getNumberOfColumns(); i++ ) {
- ruleHandle = new RuleHandle( ((HandleFactory) this.handleFactory).getNextId(),
- rule,
- i );
+ for (int i = 0; i < rule.getNumberOfColumns(); i++) {
+ ruleHandle = new RuleHandle(
+ ((HandleFactory) this.handleFactory)
+ .getNextId(), rule, i);
//
- this.getFactTable( rule.getColumnClassObjectTypeAtPosition( i ) ).addRule( this,
- ruleHandle );
+ this.getFactTable(
+ rule.getColumnClassObjectTypeAtPosition(i))
+ .addRule(this, ruleHandle);
//
- ruleHandlesList.add( ruleHandle );
+ ruleHandlesList.add(ruleHandle);
}
- this.leapsRulesToHandlesMap.put( rule,
- ruleHandlesList );
+ this.leapsRulesToHandlesMap.put(rule, ruleHandlesList);
} else {
- ruleHandle = new RuleHandle( ((HandleFactory) this.handleFactory).getNextId(),
- rule,
- -1 );
- this.noRequiredColumnsLeapsRules.add( ruleHandle );
- this.leapsRulesToHandlesMap.put( rule,
- ruleHandle );
+ ruleHandle = new RuleHandle(
+ ((HandleFactory) this.handleFactory).getNextId(),
+ rule, -1);
+ this.noRequiredColumnsLeapsRules.add(ruleHandle);
+ this.leapsRulesToHandlesMap.put(rule, ruleHandle);
}
}
}
}
protected void removeRule(List rules) {
- synchronized ( this.lock ) {
+ synchronized (this.lock) {
ArrayList ruleHandlesList;
LeapsRule rule;
RuleHandle ruleHandle;
- for ( Iterator it = rules.iterator(); it.hasNext(); ) {
+ for (Iterator it = rules.iterator(); it.hasNext();) {
rule = (LeapsRule) it.next();
// some times rules do not have "normal" constraints and only
// not and exists
- if ( rule.getNumberOfColumns() > 0 ) {
- ruleHandlesList = (ArrayList) this.leapsRulesToHandlesMap.remove( rule );
- for ( int i = 0; i < ruleHandlesList.size(); i++ ) {
- ruleHandle = (RuleHandle) ruleHandlesList.get( i );
+ if (rule.getNumberOfColumns() > 0) {
+ ruleHandlesList = (ArrayList) this.leapsRulesToHandlesMap
+ .remove(rule);
+ for (int i = 0; i < ruleHandlesList.size(); i++) {
+ ruleHandle = (RuleHandle) ruleHandlesList.get(i);
//
- this.getFactTable( rule.getColumnClassObjectTypeAtPosition( i ) ).removeRule( ruleHandle );
+ this.getFactTable(
+ rule.getColumnClassObjectTypeAtPosition(i))
+ .removeRule(ruleHandle);
}
} else {
- ruleHandle = (RuleHandle) this.leapsRulesToHandlesMap.remove( rule );
- this.noRequiredColumnsLeapsRules.remove( ruleHandle );
+ ruleHandle = (RuleHandle) this.leapsRulesToHandlesMap
+ .remove(rule);
+ this.noRequiredColumnsLeapsRules.remove(ruleHandle);
}
}
}
@@ -665,65 +638,64 @@ public void fireAllRules(AgendaFilter agendaFilter) throws FactException {
// nested inside, avoiding concurrent-modification
// exceptions, depending on code paths of the actions.
- if ( !this.firing ) {
+ if (!this.firing) {
try {
this.firing = true;
// normal rules with required columns
- while ( !this.stack.isEmpty() ) {
+ while (!this.stack.isEmpty()) {
Token token = (Token) this.stack.peek();
boolean done = false;
- while ( !done ) {
- if ( !token.isResume() ) {
- if ( token.hasNextRuleHandle() ) {
+ while (!done) {
+ if (!token.isResume()) {
+ if (token.hasNextRuleHandle()) {
token.nextRuleHandle();
} else {
// we do not pop because something might get
// asserted
// and placed on hte top of the stack during
// firing
- this.stack.remove( token );
+ this.stack.remove(token);
done = true;
}
}
- if ( !done ) {
+ if (!done) {
try {
// ok. now we have tuple, dominant fact and
// rules and ready to seek to checks if any
// agendaItem
// matches on current rule
- TokenEvaluator.evaluate( token );
+ TokenEvaluator.evaluate(token);
// something was found so set marks for
// resume processing
- if ( token.getDominantFactHandle() != null ) {
- token.setResume( true );
+ if (token.getDominantFactHandle() != null) {
+ token.setResume(true);
done = true;
}
- } catch ( NoMatchesFoundException ex ) {
- token.setResume( false );
- } catch ( Exception e ) {
- e.printStackTrace();
- System.out.println( "exception - " + e );
+ } catch (NoMatchesFoundException ex) {
+ token.setResume(false);
}
}
// we put everything on agenda
// and if there is no modules or anything like it
// it would fire just activated rule
- while ( this.agenda.fireNextItem( agendaFilter ) ) {
+ while (this.agenda.fireNextItem(agendaFilter)) {
;
}
}
}
- // pick activations generated by retraction
- // retraction does not put tokens on stack but
+ // pick activations generated by retraction
+ // retraction does not put tokens on stack but
// can generate activations off exists and not pending tuples
- while ( this.agenda.fireNextItem( agendaFilter ) ) {
+ while (this.agenda.fireNextItem(agendaFilter)) {
;
}
// mark when method was called last time
- this.idLastFireAllAt = ((HandleFactory) this.handleFactory).getNextId();
+ this.idLastFireAllAt = ((HandleFactory) this.handleFactory)
+ .getNextId();
// set all factTables to be reseeded
- for ( Enumeration e = this.factTables.elements(); e.hasMoreElements(); ) {
- ((FactTable) e.nextElement()).setReseededStack( true );
+ for (Enumeration e = this.factTables.elements(); e
+ .hasMoreElements();) {
+ ((FactTable) e.nextElement()).setReseededStack(true);
}
} finally {
this.firing = false;
@@ -740,13 +712,13 @@ public String toString() {
Object key;
ret = ret + "\n" + "Working memory";
ret = ret + "\n" + "Fact Tables by types:";
- for ( Enumeration e = this.factTables.keys(); e.hasMoreElements(); ) {
+ for (Enumeration e = this.factTables.keys(); e.hasMoreElements();) {
key = e.nextElement();
ret = ret + "\n" + "****************** " + key;
- ret = ret + ((FactTable) this.factTables.get( key )).toString();
+ ret = ret + ((FactTable) this.factTables.get(key)).toString();
}
ret = ret + "\n" + "Stack:";
- for ( Iterator it = this.stack.iterator(); it.hasNext(); ) {
+ for (Iterator it = this.stack.iterator(); it.hasNext();) {
ret = ret + "\n" + "\t" + it.next();
}
return ret;
@@ -764,109 +736,107 @@ public String toString() {
*/
public void assertTuple(LeapsTuple tuple) {
PropagationContext context = tuple.getContext();
- Rule rule = context.getRuleOrigin();
+ Rule rule = tuple.getLeapsRule().getRule();
// if the current Rule is no-loop and the origin rule is the same then
// return
- if ( rule.getNoLoop() && rule.equals( context.getRuleOrigin() ) ) {
+ if (rule.getNoLoop() && rule.equals(context.getRuleOrigin())) {
return;
}
Duration dur = rule.getDuration();
Activation agendaItem;
- if ( dur != null && dur.getDuration( tuple ) > 0 ) {
- agendaItem = new ScheduledAgendaItem( context.getPropagationNumber(),
- tuple,
- this.agenda,
- context,
- rule );
- this.agenda.scheduleItem( (ScheduledAgendaItem) agendaItem );
- tuple.setActivation( agendaItem );
- agendaItem.setActivated( true );
- this.getAgendaEventSupport().fireActivationCreated( agendaItem );
+ if (dur != null && dur.getDuration(tuple) > 0) {
+ agendaItem = new ScheduledAgendaItem(
+ context.getPropagationNumber(), tuple, this.agenda,
+ context, rule);
+ this.agenda.scheduleItem((ScheduledAgendaItem) agendaItem);
+ tuple.setActivation(agendaItem);
+ agendaItem.setActivated(true);
+ this.getAgendaEventSupport().fireActivationCreated(agendaItem);
} else {
- // -----------------
- // Lazy instantiation and addition to the Agenda of AgendGroup
- // implementations
- // ----------------
- AgendaGroupImpl agendaGroup = null;
- if ( rule.getAgendaGroup() == null || rule.getAgendaGroup().equals( "" ) || rule.getAgendaGroup().equals( AgendaGroup.MAIN ) ) {
- // Is the Rule AgendaGroup undefined? If it is use MAIN, which
- // is added to the Agenda by default
- agendaGroup = (AgendaGroupImpl) this.agenda.getAgendaGroup( AgendaGroup.MAIN );
- } else {
- // AgendaGroup is defined, so try and get the AgendaGroup from
- // the Agenda
- agendaGroup = (AgendaGroupImpl) this.agenda.getAgendaGroup( rule.getAgendaGroup() );
- }
+ LeapsRule leapsRule = tuple.getLeapsRule();
+ AgendaGroupImpl agendaGroup = leapsRule.getAgendaGroup();
+ if (agendaGroup == null) {
+ if (rule.getAgendaGroup() == null
+ || rule.getAgendaGroup().equals("")
+ || rule.getAgendaGroup().equals(AgendaGroup.MAIN)) {
+ // Is the Rule AgendaGroup undefined? If it is use MAIN,
+ // which is added to the Agenda by default
+ agendaGroup = (AgendaGroupImpl) this.agenda
+ .getAgendaGroup(AgendaGroup.MAIN);
+ } else {
+ // AgendaGroup is defined, so try and get the AgendaGroup
+ // from the Agenda
+ agendaGroup = (AgendaGroupImpl) this.agenda
+ .getAgendaGroup(rule.getAgendaGroup());
+ }
- if ( agendaGroup == null ) {
- // The AgendaGroup is defined but not yet added to the Agenda,
- // so create the AgendaGroup and add to the Agenda.
- agendaGroup = new AgendaGroupImpl( rule.getAgendaGroup() );
- this.agenda.addAgendaGroup( agendaGroup );
+ if (agendaGroup == null) {
+ // The AgendaGroup is defined but not yet added to the
+ // Agenda, so create the AgendaGroup and add to the Agenda.
+ agendaGroup = new AgendaGroupImpl(rule.getAgendaGroup());
+ this.getAgenda().addAgendaGroup(agendaGroup);
+ }
+
+ leapsRule.setAgendaGroup(agendaGroup);
}
// set the focus if rule autoFocus is true
- if ( rule.getAutoFocus() ) {
- this.agenda.setFocus( agendaGroup );
+ if (rule.getAutoFocus()) {
+ this.agenda.setFocus(agendaGroup);
+ }
+
+ // Lazy assignment of the AgendaGroup's Activation Lifo Queue
+ if (leapsRule.getLifo() == null) {
+ leapsRule.setLifo(agendaGroup.getActivationQueue(rule
+ .getSalience()));
}
- ActivationQueue queue = agendaGroup.getActivationQueue( rule.getSalience() );
- agendaItem = new AgendaItem( context.getPropagationNumber(),
- tuple,
- context,
- rule,
- queue );
+ ActivationQueue queue = leapsRule.getLifo();
- queue.add( agendaItem );
+ agendaItem = new AgendaItem(context.getPropagationNumber(), tuple,
+ context, rule, queue);
+
+ queue.add(agendaItem);
// Makes sure the Lifo is added to the AgendaGroup priority queue
// If the AgendaGroup is already in the priority queue it just
// returns.
- agendaGroup.addToAgenda( queue );
- tuple.setActivation( agendaItem );
- agendaItem.setActivated( true );
- this.getAgendaEventSupport().fireActivationCreated( agendaItem );
+
+ agendaGroup.addToAgenda(leapsRule.getLifo());
+ tuple.setActivation(agendaItem);
+ agendaItem.setActivated(true);
+ this.getAgendaEventSupport().fireActivationCreated(agendaItem);
+
// retract support
- FactHandleImpl[] factHandles = (FactHandleImpl[]) tuple.getFactHandles();
- for ( int i = 0; i < factHandles.length; i++ ) {
- factHandles[i].addActivatedTuple( tuple );
+ FactHandleImpl[] factHandles = (FactHandleImpl[]) tuple
+ .getFactHandles();
+ for (int i = 0; i < factHandles.length; i++) {
+ factHandles[i].addActivatedTuple(tuple);
}
}
}
- protected long increamentPropagationIdCounter() {
+ protected long nextPropagationIdCounter() {
return ++this.propagationIdCounter;
}
public void dispose() {
- ((RuleBaseImpl) this.ruleBase).disposeWorkingMemory( this );
- }
-
- /**
- * Retrieve the rule-firing <code>Agenda</code> for this
- * <code>WorkingMemory</code>.
- *
- * @return The <code>Agenda</code>.
- */
- public Agenda getAgenda() {
- return this.agenda;
+ ((RuleBaseImpl) this.ruleBase).disposeWorkingMemory(this);
}
public List getQueryResults(String query) {
- return (List) this.queryResults.remove( query );
+ return (List) this.queryResults.remove(query);
}
- void addToQueryResults(String query,
- Tuple tuple) {
- LinkedList list = (LinkedList) this.queryResults.get( query );
- if ( list == null ) {
+ void addToQueryResults(String query, Tuple tuple) {
+ LinkedList list = (LinkedList) this.queryResults.get(query);
+ if (list == null) {
list = new LinkedList();
- this.queryResults.put( query,
- list );
+ this.queryResults.put(query, list);
}
- list.add( tuple );
+ list.add(tuple);
}
protected TableIterator getNoRequiredColumnsLeapsRules() {
@@ -876,12 +846,17 @@ protected TableIterator getNoRequiredColumnsLeapsRules() {
public AgendaGroup getFocus() {
return this.agenda.getFocus();
}
-
+
public void setFocus(String focus) {
- this.agenda.setFocus( focus );
+ this.agenda.setFocus(focus);
}
-
+
public void setFocus(AgendaGroup focus) {
- this.agenda.setFocus( focus );
- }
+ this.agenda.setFocus(focus);
+ }
+
+ public Agenda getAgenda() {
+ return this.agenda;
+ }
+
}
diff --git a/drools-core/src/main/java/org/drools/leaps/util/Table.java b/drools-core/src/main/java/org/drools/leaps/util/Table.java
index 8aa1a1473b6..ac3276aae63 100644
--- a/drools-core/src/main/java/org/drools/leaps/util/Table.java
+++ b/drools-core/src/main/java/org/drools/leaps/util/Table.java
@@ -28,298 +28,301 @@
/**
*
* @author Alexander Bagerman
- *
+ *
*/
public class Table implements Serializable {
- private final TreeMap map;
-
- protected TableRecord headRecord;
-
- protected TableRecord tailRecord;
-
- private boolean empty = true;
-
- private int count = 0;
-
- public Table(Comparator comparator) {
- this.map = new TreeMap(comparator);
- }
-
- protected void clear() {
- this.headRecord = new TableRecord(null);
- this.empty = true;
- this.count = 0;
- this.map.clear();
- }
-
- /**
- * @param object
- * to add
- */
- public void add(Object object) {
- boolean foundEqualObject = false;
- TableRecord newRecord = new TableRecord(object);
- if (this.empty) {
- this.headRecord = newRecord;
- this.empty = false;
- } else {
- SortedMap bufMap = this.map.headMap(object);
- if (!bufMap.isEmpty()) {
- TableRecord bufRec = (TableRecord) this.map.get(bufMap.lastKey());
- if (bufRec.right != null) {
- bufRec.right.left = newRecord;
- }
- newRecord.right = bufRec.right;
- bufRec.right = newRecord;
- newRecord.left = bufRec;
-
- } else {
- this.headRecord.left = newRecord;
- newRecord.right = this.headRecord;
- this.headRecord = newRecord;
- }
- }
- if (!foundEqualObject) {
- // check if the new record was added at the end of the list
- // and assign new value to the tail record
- if (newRecord.right == null) {
- this.tailRecord = newRecord;
- }
- //
- this.count++;
- //
- this.map.put(object, newRecord);
- }
- }
-
- /**
- * Removes object from the table
- *
- * @param object
- * to remove from the table
- */
- public void remove(Object object) {
- if (!this.empty) {
- TableRecord record = (TableRecord) this.map.get(object);
-
- if (record != null) {
- if (record == this.headRecord) {
- if (record.right != null) {
- this.headRecord = record.right;
- this.headRecord.left = null;
- } else {
- // single element in table being valid
- // table is empty now
- this.headRecord = new TableRecord(null);
- this.tailRecord = this.headRecord;
- this.empty = true;
- }
- } else if (record == this.tailRecord) {
- // single element in the table case is being solved above
- // when
- // we checked for headRecord match
- this.tailRecord = record.left;
- this.tailRecord.right = null;
- } else {
- // left
- record.left.right = record.right;
- record.right.left = record.left;
- }
- }
- this.count--;
- //
- this.map.remove(object);
- }
- }
-
- /**
- * @param object
- * @return indicator of presence of given object in the table
- */
- public boolean contains(Object object) {
- boolean ret = false;
- if (!this.empty) {
- ret = this.map.containsKey(object);
- }
- return ret;
- }
-
- /**
- * @return TableIterator for this Table
- * @see org.drools.leaps.util.TableIterator
- * @see org.drools.leaps.util.BaseTableIterator
- */
- public TableIterator iterator() {
- TableIterator ret;
- if (this.empty) {
- ret = new BaseTableIterator(null, null, null);
- } else {
- ret = new BaseTableIterator(this.headRecord, this.headRecord,
- this.tailRecord);
- }
- return ret;
- }
-
- /**
- * iterator over "tail" part of the table data.
- *
- * @param objectAtStart -
- * upper boundary of the iteration
- * @param objectAtPosition -
- * starting point of the iteration
- * @return leaps table iterator
- * @throws TableOutOfBoundException
- */
- class Markers {
- TableRecord start;
- TableRecord current;
- TableRecord last;
- }
-
- public TableIterator tailConstrainedIterator(WorkingMemory workingMemory,
- ColumnConstraints constraints, Object objectAtStart,
- Object objectAtPosition) throws TableOutOfBoundException {
- Markers markers = this.getTailIteratorMarkers(objectAtStart,
- objectAtPosition);
- return new ConstrainedFactTableIterator(workingMemory, constraints,
- markers.start, markers.current, markers.last);
-
- }
-
- public TableIterator tailIterator(Object objectAtStart,
- Object objectAtPosition) throws TableOutOfBoundException {
- Markers markers = this.getTailIteratorMarkers(objectAtStart, objectAtPosition);
- return new BaseTableIterator(markers.start, markers.current,
- markers.last);
- }
-
-
- private Markers getTailIteratorMarkers(Object objectAtStart,
- Object objectAtPosition) throws TableOutOfBoundException {
- // validate
- Markers ret = new Markers();
- ret.start = null;
- ret.current = null;
- ret.last = null;
- //
- if (this.map.comparator().compare(objectAtStart, objectAtPosition) > 0) {
- throw new TableOutOfBoundException(
- "object at position is out of upper bound");
- }
- TableRecord startRecord = null;
- TableRecord currentRecord = null;
- TableRecord lastRecord = this.tailRecord;
-
- if (!this.empty) { // validate
- // if (!this.map.isEmpty()) { // validate
- if (this.map.comparator().compare(objectAtStart,
- this.tailRecord.object) <= 0) {
- // let's check if we need iterator over the whole table
- SortedMap bufMap = this.map.tailMap(objectAtStart);
- if (!bufMap.isEmpty()) {
- startRecord = (TableRecord) bufMap.get(bufMap.firstKey());
- if (this.map.comparator().compare(objectAtStart,
- objectAtPosition) == 0) {
- currentRecord = startRecord;
- } else {
- // rewind to position
- bufMap = bufMap.tailMap(objectAtPosition);
-
- if (!bufMap.isEmpty()) {
- currentRecord = ((TableRecord) bufMap.get(bufMap
- .firstKey()));
- } else {
- currentRecord = startRecord;
- }
- }
- ret.start = startRecord;
- ret.current = currentRecord;
- ret.last = lastRecord;
- }
- }
- }
-
- return ret;
- }
-
- /**
- * iterator over "head" part of the table data. it does not take
- * "positional" parameter because it's used for scanning shadow tables and
- * this scan never "resumes"
- *
- * @param objectAtEnd -
- * lower boundary of the iteration
- * @return leaps table iterator
- */
- public TableIterator headIterator(Object objectAtEnd) {
- TableIterator iterator = null;
- TableRecord startRecord = this.headRecord;
- TableRecord currentRecord = this.headRecord;
- TableRecord lastRecord = null;
-
- if (!this.empty) { // validate
- if (this.map.comparator().compare(this.headRecord.object,
- objectAtEnd) <= 0) {
- // let's check if we need iterator over the whole table
- SortedMap bufMap = this.map.headMap(objectAtEnd);
- if (!bufMap.isEmpty()) {
- lastRecord = (TableRecord) bufMap.get(bufMap.lastKey());
- // check if the next one is what we need
- if (lastRecord.right != null
- && this.map.comparator().compare(
- lastRecord.right.object, objectAtEnd) == 0) {
- lastRecord = lastRecord.right;
- }
- iterator = new BaseTableIterator(startRecord, currentRecord,
- lastRecord);
- } else {
- // empty iterator
- iterator = new BaseTableIterator(null, null, null);
- }
- } else {
- // empty iterator
- iterator = new BaseTableIterator(null, null, null);
- }
- } else {
- // empty iterator
- iterator = new BaseTableIterator(null, null, null);
- }
-
- return iterator;
- }
-
- /**
- * indicates if table has any elements
- *
- * @return empty indicator
- */
- public boolean isEmpty() {
- return this.empty;
- }
-
- public String toString() {
- String ret = "";
-
- for (Iterator it = this.iterator(); it.hasNext();) {
- ret = ret + it.next() + "\n";
- }
- return ret;
- }
-
- public int size() {
- return this.count;
- }
-
- public Object top() {
- return this.headRecord.object;
- }
-
- public Object bottom() {
- return this.tailRecord.object;
- }
-
- public static TableIterator singleItemIterator(Object object){
- return new BaseTableIterator(new TableRecord(object));
- }
+ private final TreeMap map;
+
+ protected TableRecord headRecord;
+
+ protected TableRecord tailRecord;
+
+ private boolean empty = true;
+
+ private int count = 0;
+
+ public Table(Comparator comparator) {
+ this.map = new TreeMap(comparator);
+ }
+
+ protected void clear() {
+ this.headRecord = new TableRecord(null);
+ this.empty = true;
+ this.count = 0;
+ this.map.clear();
+ }
+
+ /**
+ * @param object
+ * to add
+ */
+ public void add(Object object) {
+ boolean foundEqualObject = false;
+ TableRecord newRecord = new TableRecord(object);
+ if (this.empty) {
+ this.headRecord = newRecord;
+ this.empty = false;
+ } else {
+ SortedMap bufMap = this.map.headMap(object);
+ if (!bufMap.isEmpty()) {
+ TableRecord bufRec = (TableRecord) this.map.get(bufMap
+ .lastKey());
+ if (bufRec.right != null) {
+ bufRec.right.left = newRecord;
+ }
+ newRecord.right = bufRec.right;
+ bufRec.right = newRecord;
+ newRecord.left = bufRec;
+
+ } else {
+ this.headRecord.left = newRecord;
+ newRecord.right = this.headRecord;
+ this.headRecord = newRecord;
+ }
+ }
+ if (!foundEqualObject) {
+ // check if the new record was added at the end of the list
+ // and assign new value to the tail record
+ if (newRecord.right == null) {
+ this.tailRecord = newRecord;
+ }
+ //
+ this.count++;
+ //
+ this.map.put(object, newRecord);
+ }
+ }
+
+ /**
+ * Removes object from the table
+ *
+ * @param object
+ * to remove from the table
+ */
+ public void remove(Object object) {
+ if (!this.empty) {
+ TableRecord record = (TableRecord) this.map.get(object);
+
+ if (record != null) {
+ if (record == this.headRecord) {
+ if (record.right != null) {
+ this.headRecord = record.right;
+ this.headRecord.left = null;
+ } else {
+ // single element in table being valid
+ // table is empty now
+ this.headRecord = new TableRecord(null);
+ this.tailRecord = this.headRecord;
+ this.empty = true;
+ }
+ } else if (record == this.tailRecord) {
+ // single element in the table case is being solved above
+ // when
+ // we checked for headRecord match
+ this.tailRecord = record.left;
+ this.tailRecord.right = null;
+ } else {
+ // left
+ record.left.right = record.right;
+ record.right.left = record.left;
+ }
+ }
+ this.count--;
+ //
+ this.map.remove(object);
+ }
+ }
+
+ /**
+ * @param object
+ * @return indicator of presence of given object in the table
+ */
+ public boolean contains(Object object) {
+ boolean ret = false;
+ if (!this.empty) {
+ ret = this.map.containsKey(object);
+ }
+ return ret;
+ }
+
+ /**
+ * @return TableIterator for this Table
+ * @see org.drools.leaps.util.TableIterator
+ * @see org.drools.leaps.util.BaseTableIterator
+ */
+ public TableIterator iterator() {
+ TableIterator ret;
+ if (this.empty) {
+ ret = new BaseTableIterator(null, null, null);
+ } else {
+ ret = new BaseTableIterator(this.headRecord, this.headRecord,
+ this.tailRecord);
+ }
+ return ret;
+ }
+
+ /**
+ * iterator over "tail" part of the table data.
+ *
+ * @param objectAtStart -
+ * upper boundary of the iteration
+ * @param objectAtPosition -
+ * starting point of the iteration
+ * @return leaps table iterator
+ * @throws TableOutOfBoundException
+ */
+ class Markers {
+ TableRecord start;
+
+ TableRecord current;
+
+ TableRecord last;
+ }
+
+ public TableIterator tailConstrainedIterator(WorkingMemory workingMemory,
+ ColumnConstraints constraints, Object objectAtStart,
+ Object objectAtPosition) {
+ Markers markers = this.getTailIteratorMarkers(objectAtStart,
+ objectAtPosition);
+ return new ConstrainedFactTableIterator(workingMemory, constraints,
+ markers.start, markers.current, markers.last);
+
+ }
+
+ public TableIterator tailIterator(Object objectAtStart,
+ Object objectAtPosition) {
+ Markers markers = this.getTailIteratorMarkers(objectAtStart,
+ objectAtPosition);
+ return new BaseTableIterator(markers.start, markers.current,
+ markers.last);
+ }
+
+ private Markers getTailIteratorMarkers(Object objectAtStart,
+ Object objectAtPosition) {
+ // validate
+ Markers ret = new Markers();
+ ret.start = null;
+ ret.current = null;
+ ret.last = null;
+ //
+ if (this.map.comparator().compare(objectAtStart, objectAtPosition) > 0) {
+ // return empty iterator
+ return ret;
+ }
+ TableRecord startRecord = null;
+ TableRecord currentRecord = null;
+ TableRecord lastRecord = this.tailRecord;
+
+ if (!this.empty) { // validate
+ // if (!this.map.isEmpty()) { // validate
+ if (this.map.comparator().compare(objectAtStart,
+ this.tailRecord.object) <= 0) {
+ // let's check if we need iterator over the whole table
+ SortedMap bufMap = this.map.tailMap(objectAtStart);
+ if (!bufMap.isEmpty()) {
+ startRecord = (TableRecord) bufMap.get(bufMap.firstKey());
+ if (this.map.comparator().compare(objectAtStart,
+ objectAtPosition) == 0) {
+ currentRecord = startRecord;
+ } else {
+ // rewind to position
+ bufMap = bufMap.tailMap(objectAtPosition);
+
+ if (!bufMap.isEmpty()) {
+ currentRecord = ((TableRecord) bufMap.get(bufMap
+ .firstKey()));
+ } else {
+ currentRecord = startRecord;
+ }
+ }
+ ret.start = startRecord;
+ ret.current = currentRecord;
+ ret.last = lastRecord;
+ }
+ }
+ }
+
+ return ret;
+ }
+
+ /**
+ * iterator over "head" part of the table data. it does not take
+ * "positional" parameter because it's used for scanning shadow tables and
+ * this scan never "resumes"
+ *
+ * @param objectAtEnd -
+ * lower boundary of the iteration
+ * @return leaps table iterator
+ */
+ public TableIterator headIterator(Object objectAtEnd) {
+ TableIterator iterator = null;
+ TableRecord startRecord = this.headRecord;
+ TableRecord currentRecord = this.headRecord;
+ TableRecord lastRecord = null;
+
+ if (!this.empty) { // validate
+ if (this.map.comparator().compare(this.headRecord.object,
+ objectAtEnd) <= 0) {
+ // let's check if we need iterator over the whole table
+ SortedMap bufMap = this.map.headMap(objectAtEnd);
+ if (!bufMap.isEmpty()) {
+ lastRecord = (TableRecord) bufMap.get(bufMap.lastKey());
+ // check if the next one is what we need
+ if (lastRecord.right != null
+ && this.map.comparator().compare(
+ lastRecord.right.object, objectAtEnd) == 0) {
+ lastRecord = lastRecord.right;
+ }
+ iterator = new BaseTableIterator(startRecord,
+ currentRecord, lastRecord);
+ } else {
+ // empty iterator
+ iterator = new BaseTableIterator(null, null, null);
+ }
+ } else {
+ // empty iterator
+ iterator = new BaseTableIterator(null, null, null);
+ }
+ } else {
+ // empty iterator
+ iterator = new BaseTableIterator(null, null, null);
+ }
+
+ return iterator;
+ }
+
+ /**
+ * indicates if table has any elements
+ *
+ * @return empty indicator
+ */
+ public boolean isEmpty() {
+ return this.empty;
+ }
+
+ public String toString() {
+ String ret = "";
+
+ for (Iterator it = this.iterator(); it.hasNext();) {
+ ret = ret + it.next() + "\n";
+ }
+ return ret;
+ }
+
+ public int size() {
+ return this.count;
+ }
+
+ public Object top() {
+ return this.headRecord.object;
+ }
+
+ public Object bottom() {
+ return this.tailRecord.object;
+ }
+
+ public static TableIterator singleItemIterator(Object object) {
+ return new BaseTableIterator(new TableRecord(object));
+ }
}
diff --git a/drools-core/src/main/java/org/drools/leaps/util/TableOutOfBoundException.java b/drools-core/src/main/java/org/drools/leaps/util/TableOutOfBoundException.java
deleted file mode 100644
index 2aad4938ec6..00000000000
--- a/drools-core/src/main/java/org/drools/leaps/util/TableOutOfBoundException.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package org.drools.leaps.util;
-
-/*
- * Copyright 2005 Alexander Bagerman
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- *
- * @author Alexander Bagerman
- *
- */
-public class TableOutOfBoundException extends Exception {
- /**
- *
- */
- private static final long serialVersionUID = 1L;
-
- public TableOutOfBoundException() {
- super();
- }
-
- public TableOutOfBoundException(String msg) {
- super(msg);
- }
-
- public TableOutOfBoundException(Exception ex) {
- super(ex);
- }
-}
diff --git a/drools-core/src/test/java/org/drools/leaps/LogicalAssertionTest.java b/drools-core/src/test/java/org/drools/leaps/LogicalAssertionTest.java
index 3734ea589ea..0160fe3273c 100644
--- a/drools-core/src/test/java/org/drools/leaps/LogicalAssertionTest.java
+++ b/drools-core/src/test/java/org/drools/leaps/LogicalAssertionTest.java
@@ -16,6 +16,8 @@
* limitations under the License.
*/
+import java.util.ArrayList;
+
import org.drools.DroolsTestCase;
import org.drools.FactException;
import org.drools.FactHandle;
@@ -58,6 +60,8 @@ public void testEqualsMap() throws Exception {
rule1.setConsequence(this.consequence);
+ LeapsRule leapsRule1 = new LeapsRule(rule1, new ArrayList(), new ArrayList(), new ArrayList(), new ArrayList());
+
FactHandleImpl[] factHandles = new FactHandleImpl[1];
PropagationContext context1;
LeapsTuple tuple1;
@@ -68,7 +72,7 @@ public void testEqualsMap() throws Exception {
context1 = new PropagationContextImpl(1, PropagationContext.ASSERTION,
rule1, null);
factHandles[0] = (FactHandleImpl) handle1;
- tuple1 = new LeapsTuple(factHandles, null, context1);
+ tuple1 = new LeapsTuple(factHandles, leapsRule1, context1);
this.workingMemory.assertTuple(tuple1);
FactHandle logicalHandle1 = this.workingMemory.assertObject(
logicalString1, false, true, null, this.workingMemory
@@ -79,7 +83,7 @@ public void testEqualsMap() throws Exception {
logicalString2, false, true, rule1, this.workingMemory
.getAgenda().getActivations()[0]);
factHandles[0] = (FactHandleImpl) logicalHandle2;
- tuple1 = new LeapsTuple(factHandles, null, context1);
+ tuple1 = new LeapsTuple(factHandles, leapsRule1, context1);
this.workingMemory.assertTuple(tuple1);
assertSame(logicalHandle1, logicalHandle2);
@@ -101,6 +105,8 @@ public void testStatedOverride() throws Exception {
rule1.setConsequence(this.consequence);
+ LeapsRule leapsRule1 = new LeapsRule(rule1, new ArrayList(), new ArrayList(), new ArrayList(), new ArrayList());
+
FactHandleImpl[] factHandles = new FactHandleImpl[1];
PropagationContext context1;
LeapsTuple tuple1;
@@ -111,7 +117,7 @@ public void testStatedOverride() throws Exception {
context1 = new PropagationContextImpl(1, PropagationContext.ASSERTION,
rule1, null);
factHandles[0] = (FactHandleImpl) handle1;
- tuple1 = new LeapsTuple(factHandles, null, context1);
+ tuple1 = new LeapsTuple(factHandles, leapsRule1, context1);
this.workingMemory.assertTuple(tuple1);
FactHandle logicalHandle1 = this.workingMemory.assertObject(
logicalString1, false, true, null, this.workingMemory
@@ -133,7 +139,7 @@ public void testStatedOverride() throws Exception {
// Test that a logical assertion cannot override a STATED assertion
factHandles[0] = (FactHandleImpl) logicalHandle2;
- tuple1 = new LeapsTuple(factHandles, null, context1);
+ tuple1 = new LeapsTuple(factHandles, leapsRule1, context1);
this.workingMemory.assertTuple(tuple1);
logicalString2 = new String("logical");
@@ -174,6 +180,8 @@ public void testRetract() throws Exception {
// create the first agendaItem which will justify the fact "logical"
rule1.setConsequence(this.consequence);
+ LeapsRule leapsRule1 = new LeapsRule(rule1, new ArrayList(), new ArrayList(), new ArrayList(), new ArrayList());
+
FactHandleImpl tuple1FactHandle = (FactHandleImpl) this.workingMemory
.assertObject("tuple1 object");
FactHandleImpl tuple2FactHandle = (FactHandleImpl) this.workingMemory
@@ -185,9 +193,9 @@ public void testRetract() throws Exception {
PropagationContext context = new PropagationContextImpl(0,
PropagationContext.ASSERTION, rule1, null);
- LeapsTuple tuple1 = new LeapsTuple(factHandlesTuple1, null,
+ LeapsTuple tuple1 = new LeapsTuple(factHandlesTuple1, leapsRule1,
context);
- LeapsTuple tuple2 = new LeapsTuple(factHandlesTuple2, null,
+ LeapsTuple tuple2 = new LeapsTuple(factHandlesTuple2, leapsRule1,
context);
this.workingMemory.assertTuple(tuple1);
Activation activation1 = this.workingMemory.getAgenda()
@@ -204,7 +212,7 @@ public void testRetract() throws Exception {
rule2.setConsequence(this.consequence);
PropagationContext context2 = new PropagationContextImpl(0,
PropagationContext.ASSERTION, rule2, null);
- tuple1 = new LeapsTuple(factHandlesTuple2, null, context2);
+ tuple1 = new LeapsTuple(factHandlesTuple2, leapsRule1, context2);
this.workingMemory.assertTuple(tuple1);
Activation activation2 = this.workingMemory.getAgenda()
.getActivations()[1];
@@ -227,6 +235,9 @@ public void testMultipleLogicalRelationships() throws FactException {
final Rule rule1 = new Rule("test-rule1");
// create the first agendaItem which will justify the fact "logical"
rule1.setConsequence(this.consequence);
+
+ LeapsRule leapsRule1 = new LeapsRule(rule1, new ArrayList(), new ArrayList(), new ArrayList(), new ArrayList());
+
FactHandleImpl tuple1Fact = (FactHandleImpl) this.workingMemory
.assertObject("tuple1 object");
FactHandleImpl tuple2Fact = (FactHandleImpl) this.workingMemory
@@ -238,7 +249,7 @@ public void testMultipleLogicalRelationships() throws FactException {
PropagationContext context1 = new PropagationContextImpl(0,
PropagationContext.ASSERTION, rule1, null);
- LeapsTuple tuple1 = new LeapsTuple(tuple1Handles, null, context1);
+ LeapsTuple tuple1 = new LeapsTuple(tuple1Handles, leapsRule1, context1);
this.workingMemory.assertTuple(tuple1);
Activation activation1 = this.workingMemory.getAgenda()
.getActivations()[0];
@@ -253,7 +264,7 @@ public void testMultipleLogicalRelationships() throws FactException {
rule2.setConsequence(this.consequence);
PropagationContext context2 = new PropagationContextImpl(0,
PropagationContext.ASSERTION, rule2, null);
- LeapsTuple tuple2 = new LeapsTuple(tuple2Handles, null, context2);
+ LeapsTuple tuple2 = new LeapsTuple(tuple2Handles, leapsRule1, context2);
this.workingMemory.assertTuple(tuple2);
// "logical" should only appear once
Activation activation2 = this.workingMemory.getAgenda()
diff --git a/drools-core/src/test/java/org/drools/leaps/SchedulerTest.java b/drools-core/src/test/java/org/drools/leaps/SchedulerTest.java
index 4e243de91f2..25153262ee3 100644
--- a/drools-core/src/test/java/org/drools/leaps/SchedulerTest.java
+++ b/drools-core/src/test/java/org/drools/leaps/SchedulerTest.java
@@ -147,7 +147,7 @@ public void evaluate(KnowledgeHelper knowledgeHelper,
data.size() );
// sleep for 0.5 seconds
- Thread.sleep( 500 );
+ Thread.sleep( 1000 );
// now check for update
assertEquals( 4,
diff --git a/drools-core/src/test/java/org/drools/leaps/util/TableTest.java b/drools-core/src/test/java/org/drools/leaps/util/TableTest.java
index ff230cd3cb7..6e876f8235d 100644
--- a/drools-core/src/test/java/org/drools/leaps/util/TableTest.java
+++ b/drools-core/src/test/java/org/drools/leaps/util/TableTest.java
@@ -171,43 +171,39 @@ public void testTailIterator() {
this.testTable.add(this.h1000);
this.testTable.add(this.h100);
this.testTable.add(this.h10);
- try {
- TableIterator it = this.testTable.tailIterator(this.h100, this.h10);
- assertTrue(it.hasNext());
- assertEquals(it.next(), this.h10);
- assertTrue(it.hasNext());
- assertEquals(it.next(), this.h1);
- assertFalse(it.hasNext());
- it.reset();
- assertTrue(it.hasNext());
- assertEquals(it.next(), this.h100);
- assertTrue(it.hasNext());
- assertEquals(it.next(), this.h10);
- assertTrue(it.hasNext());
- assertEquals(it.next(), this.h1);
- assertFalse(it.hasNext());
-
- this.testTable.clear();
- Handle fh1 = new Handle(1, new Guest("1", Sex.resolve("m"), Hobby
- .resolve("h2")));
- Handle fh2 = new Handle(2, new Guest("1", Sex.resolve("m"), Hobby
- .resolve("h1")));
- Handle fh3 = new Handle(3, new Guest("1", Sex.resolve("m"), Hobby
- .resolve("h3")));
- Handle fh4 = new Handle(4, new Guest("3", Sex.resolve("f"), Hobby
- .resolve("h2")));
- Handle fhC = new Handle(5, new Context("start"));
- this.testTable.add(fh1);
- this.testTable.add(fh2);
- this.testTable.add(fh3);
- this.testTable.add(fh4);
- it = this.testTable.tailIterator(fhC, fhC);
- assertTrue(it.hasNext());
- assertEquals(it.next(), fh4);
-
- } catch (TableOutOfBoundException ex) {
-
- }
+
+ TableIterator it = this.testTable.tailIterator(this.h100, this.h10);
+ assertTrue(it.hasNext());
+ assertEquals(it.next(), this.h10);
+ assertTrue(it.hasNext());
+ assertEquals(it.next(), this.h1);
+ assertFalse(it.hasNext());
+ it.reset();
+ assertTrue(it.hasNext());
+ assertEquals(it.next(), this.h100);
+ assertTrue(it.hasNext());
+ assertEquals(it.next(), this.h10);
+ assertTrue(it.hasNext());
+ assertEquals(it.next(), this.h1);
+ assertFalse(it.hasNext());
+
+ this.testTable.clear();
+ Handle fh1 = new Handle(1, new Guest("1", Sex.resolve("m"), Hobby
+ .resolve("h2")));
+ Handle fh2 = new Handle(2, new Guest("1", Sex.resolve("m"), Hobby
+ .resolve("h1")));
+ Handle fh3 = new Handle(3, new Guest("1", Sex.resolve("m"), Hobby
+ .resolve("h3")));
+ Handle fh4 = new Handle(4, new Guest("3", Sex.resolve("f"), Hobby
+ .resolve("h2")));
+ Handle fhC = new Handle(5, new Context("start"));
+ this.testTable.add(fh1);
+ this.testTable.add(fh2);
+ this.testTable.add(fh3);
+ this.testTable.add(fh4);
+ it = this.testTable.tailIterator(fhC, fhC);
+ assertTrue(it.hasNext());
+ assertEquals(it.next(), fh4);
}
public void testHeadIterator() {
|
b3c028768301ee4a6fd86d6ab24f85f20578d003
|
camel
|
Added unit tests based on the mailing list- question--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1224673 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/test/java/org/apache/camel/language/simple/SimpleParserPredicateTest.java b/camel-core/src/test/java/org/apache/camel/language/simple/SimpleParserPredicateTest.java
index abb604fde1704..38cadd2444108 100644
--- a/camel-core/src/test/java/org/apache/camel/language/simple/SimpleParserPredicateTest.java
+++ b/camel-core/src/test/java/org/apache/camel/language/simple/SimpleParserPredicateTest.java
@@ -177,5 +177,13 @@ public void testSimpleManyOrLogical() throws Exception {
assertTrue("Should match", pre.matches(exchange));
}
+
+ public void testSimpleExpressionPredicate() throws Exception {
+ exchange.getIn().setBody("Hello");
+ exchange.getIn().setHeader("number", "1234");
+ SimplePredicateParser parser = new SimplePredicateParser("${in.header.number} regex '\\d{4}'");
+ Predicate pre = parser.parsePredicate();
+ assertTrue("Should match", pre.matches(exchange));
+ }
}
diff --git a/camel-core/src/test/java/org/apache/camel/language/simple/SimpleParserRegexpPredicateTest.java b/camel-core/src/test/java/org/apache/camel/language/simple/SimpleParserRegexpPredicateTest.java
index 97b060096bca8..70a2afe88ff88 100644
--- a/camel-core/src/test/java/org/apache/camel/language/simple/SimpleParserRegexpPredicateTest.java
+++ b/camel-core/src/test/java/org/apache/camel/language/simple/SimpleParserRegexpPredicateTest.java
@@ -32,6 +32,9 @@ public void testSimpleRegexp() throws Exception {
Predicate pre = parser.parsePredicate();
assertTrue(pre.matches(exchange));
+
+ exchange.getIn().setBody("12.2a.22ab");
+ assertFalse(pre.matches(exchange));
}
}
diff --git a/components/camel-spring/src/test/java/org/apache/camel/language/SpringSimpleRegexTest.java b/components/camel-spring/src/test/java/org/apache/camel/language/SpringSimpleRegexTest.java
new file mode 100644
index 0000000000000..c38221444e7de
--- /dev/null
+++ b/components/camel-spring/src/test/java/org/apache/camel/language/SpringSimpleRegexTest.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.language;
+
+import org.apache.camel.spring.SpringTestSupport;
+import org.springframework.context.support.AbstractXmlApplicationContext;
+import org.springframework.context.support.ClassPathXmlApplicationContext;
+
+public class SpringSimpleRegexTest extends SpringTestSupport {
+
+ @Override
+ protected AbstractXmlApplicationContext createApplicationContext() {
+ return new ClassPathXmlApplicationContext("org/apache/camel/language/springSimpleRegexContext.xml");
+ }
+
+ public void testSimpleRegex() {
+ String result = template.requestBody("direct:start", "Something is wrong", String.class);
+ System.out.println("result : " + result);
+
+ result = template.requestBody("direct:start", "12.34.5678", String.class);
+ System.out.println("result : " + result);
+ }
+
+
+}
diff --git a/components/camel-spring/src/test/resources/org/apache/camel/language/springSimpleRegexContext.xml b/components/camel-spring/src/test/resources/org/apache/camel/language/springSimpleRegexContext.xml
new file mode 100644
index 0000000000000..6becbc1f6bfbc
--- /dev/null
+++ b/components/camel-spring/src/test/resources/org/apache/camel/language/springSimpleRegexContext.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="
+ http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd
+ ">
+
+ <camelContext xmlns="http://camel.apache.org/schema/spring">
+ <route>
+ <from uri="direct:start" />
+ <choice>
+ <when>
+ <simple>${body} regex '^\d{2}\.\d{2}\.\d{4}$'</simple>
+ <setBody>
+ <constant>Found the result.</constant>
+ </setBody>
+ </when>
+ <otherwise>
+ <setBody>
+ <constant>Let's keep looking.</constant>
+ </setBody>
+ </otherwise>
+ </choice>
+ </route>
+ </camelContext>
+
+</beans>
|
296b6b2f5757d2f8100daef0d2507183deeca77a
|
elasticsearch
|
use custom similarity in search (if there is one)--
|
a
|
https://github.com/elastic/elasticsearch
|
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/common/lucene/search/ExtendedIndexSearcher.java b/modules/elasticsearch/src/main/java/org/elasticsearch/common/lucene/search/ExtendedIndexSearcher.java
new file mode 100644
index 0000000000000..960fcef9a5416
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/common/lucene/search/ExtendedIndexSearcher.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.lucene.search;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.IndexSearcher;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class ExtendedIndexSearcher extends IndexSearcher {
+
+ public ExtendedIndexSearcher(IndexSearcher searcher) {
+ super(searcher.getIndexReader());
+ setSimilarity(searcher.getSimilarity());
+ }
+
+ public ExtendedIndexSearcher(IndexReader r) {
+ super(r);
+ }
+
+ public IndexReader[] subReaders() {
+ return this.subReaders;
+ }
+
+ public int[] docStarts() {
+ return this.docStarts;
+ }
+
+ // taken from DirectoryReader#readerIndex
+
+ public int readerIndex(int doc) {
+ int lo = 0; // search starts array
+ int hi = subReaders.length - 1; // for first element less
+
+ while (hi >= lo) {
+ int mid = (lo + hi) >>> 1;
+ int midValue = docStarts[mid];
+ if (doc < midValue)
+ hi = mid - 1;
+ else if (doc > midValue)
+ lo = mid + 1;
+ else { // found a match
+ while (mid + 1 < subReaders.length && docStarts[mid + 1] == midValue) {
+ mid++; // scan to last match
+ }
+ return mid;
+ }
+ }
+ return hi;
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
index 1be00f814eb03..d7dae6bfe068f 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
@@ -19,10 +19,11 @@
package org.elasticsearch.search.internal;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.*;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.lucene.MultiCollector;
+import org.elasticsearch.common.lucene.search.ExtendedIndexSearcher;
+import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.search.dfs.CachedDfSource;
import java.io.IOException;
@@ -31,7 +32,7 @@
/**
* @author kimchy (shay.banon)
*/
-public class ContextIndexSearcher extends IndexSearcher {
+public class ContextIndexSearcher extends ExtendedIndexSearcher {
private SearchContext searchContext;
@@ -43,42 +44,11 @@ public class ContextIndexSearcher extends IndexSearcher {
private boolean useGlobalCollectors = false;
- public ContextIndexSearcher(SearchContext searchContext, IndexReader r) {
- super(r);
+ public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) {
+ super(searcher.searcher());
this.searchContext = searchContext;
}
- public IndexReader[] subReaders() {
- return this.subReaders;
- }
-
- public int[] docStarts() {
- return this.docStarts;
- }
-
- // taken from DirectoryReader#readerIndex
-
- public int readerIndex(int doc) {
- int lo = 0; // search starts array
- int hi = subReaders.length - 1; // for first element less
-
- while (hi >= lo) {
- int mid = (lo + hi) >>> 1;
- int midValue = docStarts[mid];
- if (doc < midValue)
- hi = mid - 1;
- else if (doc > midValue)
- lo = mid + 1;
- else { // found a match
- while (mid + 1 < subReaders.length && docStarts[mid + 1] == midValue) {
- mid++; // scan to last match
- }
- return mid;
- }
- }
- return hi;
- }
-
public void dfSource(CachedDfSource dfSource) {
this.dfSource = dfSource;
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/SearchContext.java
index 833964d46e2bb..4687e23969fd7 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/SearchContext.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/SearchContext.java
@@ -129,7 +129,7 @@ public SearchContext(long id, SearchShardTarget shardTarget, TimeValue timeout,
this.fetchResult = new FetchSearchResult(id, shardTarget);
this.indexService = indexService;
- this.searcher = new ContextIndexSearcher(this, engineSearcher.reader());
+ this.searcher = new ContextIndexSearcher(this, engineSearcher);
}
@Override public boolean release() throws ElasticSearchException {
|
34baa742739f0d4371816ee24225398affcf2d39
|
hadoop
|
YARN-1718. Fix a couple isTerminals in Fair- Scheduler queue placement rules (Sandy Ryza)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1569929 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 1dfa87eee2b09..86f8a891c344b 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -14,6 +14,9 @@ Release 2.5.0 - UNRELEASED
BUG FIXES
+ YARN-1718. Fix a couple isTerminals in Fair Scheduler queue placement rules
+ (Sandy Ryza)
+
Release 2.4.0 - UNRELEASED
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueuePlacementRule.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueuePlacementRule.java
index ac0df50954680..6acba27479f49 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueuePlacementRule.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueuePlacementRule.java
@@ -162,7 +162,7 @@ protected String getQueueForApp(String requestedQueue,
@Override
public boolean isTerminal() {
- return create;
+ return false;
}
}
@@ -201,7 +201,7 @@ protected String getQueueForApp(String requestedQueue, String user,
@Override
public boolean isTerminal() {
- return create;
+ return true;
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java
index 5b5a51fa78518..fd807c9d7e147 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java
@@ -106,6 +106,17 @@ public void testTerminalRuleInMiddle() throws Exception {
parse(sb.toString());
}
+ @Test
+ public void testTerminals() throws Exception {
+ // Should make it through without an exception
+ StringBuffer sb = new StringBuffer();
+ sb.append("<queuePlacementPolicy>");
+ sb.append(" <rule name='secondaryGroupExistingQueue' create='true'/>");
+ sb.append(" <rule name='default' create='false'/>");
+ sb.append("</queuePlacementPolicy>");
+ parse(sb.toString());
+ }
+
private QueuePlacementPolicy parse(String str) throws Exception {
// Read and parse the allocations file.
DocumentBuilderFactory docBuilderFactory =
|
a72066f13f76503665abc5d93fcc6edb65ff3f28
|
hbase
|
HBASE-9600 TestColumnSchemaModel and- TestTableSchemaModel test cases are failing with IBM IBM Java 6--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1525179 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
index e7dc05d032e5..05c2dc0d2444 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
@@ -20,7 +20,7 @@
package org.apache.hadoop.hbase.rest.model;
import java.io.Serializable;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.Map;
import javax.xml.bind.annotation.XmlAnyAttribute;
@@ -57,7 +57,7 @@ public class ColumnSchemaModel implements Serializable {
private static QName VERSIONS = new QName(HConstants.VERSIONS);
private String name;
- private Map<QName,Object> attrs = new HashMap<QName,Object>();
+ private Map<QName,Object> attrs = new LinkedHashMap<QName,Object>();
/**
* Default constructor
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
index 7812c602017b..6241db91c8cd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
@@ -22,7 +22,7 @@
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -74,7 +74,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
new QName(HColumnDescriptor.COMPRESSION);
private String name;
- private Map<QName,Object> attrs = new HashMap<QName,Object>();
+ private Map<QName,Object> attrs = new LinkedHashMap<QName,Object>();
private List<ColumnSchemaModel> columns = new ArrayList<ColumnSchemaModel>();
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java
index cb022d12c053..15e165285340 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java
@@ -59,13 +59,13 @@ public TestColumnSchemaModel() throws Exception {
protected ColumnSchemaModel buildTestModel() {
ColumnSchemaModel model = new ColumnSchemaModel();
model.setName(COLUMN_NAME);
- model.__setBlockcache(BLOCKCACHE);
model.__setBlocksize(BLOCKSIZE);
model.__setBloomfilter(BLOOMFILTER);
+ model.__setBlockcache(BLOCKCACHE);
model.__setCompression(COMPRESSION);
- model.__setInMemory(IN_MEMORY);
- model.__setTTL(TTL);
model.__setVersions(VERSIONS);
+ model.__setTTL(TTL);
+ model.__setInMemory(IN_MEMORY);
return model;
}
|
f53baebf899b9a03f2bddbc205a764f3002911eb
|
kotlin
|
Show warning on usages of javaClass<T>() in- annotations loaded from Java--
|
a
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend.java/src/org/jetbrains/kotlin/load/kotlin/JavaAnnotationCallChecker.kt b/compiler/frontend.java/src/org/jetbrains/kotlin/load/kotlin/JavaAnnotationCallChecker.kt
index ec41dae95eceb..07f917fcf7430 100644
--- a/compiler/frontend.java/src/org/jetbrains/kotlin/load/kotlin/JavaAnnotationCallChecker.kt
+++ b/compiler/frontend.java/src/org/jetbrains/kotlin/load/kotlin/JavaAnnotationCallChecker.kt
@@ -16,15 +16,23 @@
package org.jetbrains.kotlin.load.kotlin
+import org.jetbrains.kotlin.builtins.KotlinBuiltIns
import org.jetbrains.kotlin.descriptors.CallableDescriptor
+import org.jetbrains.kotlin.descriptors.ClassDescriptor
import org.jetbrains.kotlin.descriptors.ClassKind
+import org.jetbrains.kotlin.descriptors.ValueParameterDescriptor
+import org.jetbrains.kotlin.diagnostics.DiagnosticFactory0
import org.jetbrains.kotlin.load.java.JvmAnnotationNames
import org.jetbrains.kotlin.load.java.descriptors.JavaConstructorDescriptor
+import org.jetbrains.kotlin.psi.JetExpression
+import org.jetbrains.kotlin.resolve.DescriptorUtils
import org.jetbrains.kotlin.resolve.calls.checkers.CallChecker
import org.jetbrains.kotlin.resolve.calls.context.BasicCallResolutionContext
import org.jetbrains.kotlin.resolve.calls.model.ExpressionValueArgument
import org.jetbrains.kotlin.resolve.calls.model.ResolvedCall
+import org.jetbrains.kotlin.resolve.calls.model.ResolvedValueArgument
import org.jetbrains.kotlin.resolve.jvm.diagnostics.ErrorsJvm
+import org.jetbrains.kotlin.types.JetType
public class JavaAnnotationCallChecker : CallChecker {
override fun <F : CallableDescriptor?> check(resolvedCall: ResolvedCall<F>, context: BasicCallResolutionContext) {
@@ -32,14 +40,56 @@ public class JavaAnnotationCallChecker : CallChecker {
if (resultingDescriptor !is JavaConstructorDescriptor ||
resultingDescriptor.getContainingDeclaration().getKind() != ClassKind.ANNOTATION_CLASS) return
+ reportErrorsOnPositionedArguments(resolvedCall, context)
+ reportWarningOnJavaClassUsages(resolvedCall, context)
+ }
+
+ private fun reportWarningOnJavaClassUsages(
+ resolvedCall: ResolvedCall<*>,
+ context: BasicCallResolutionContext
+ ) {
+ resolvedCall.getValueArguments().filter { it.getKey().getType().isJavaLangClassOrArray() }.forEach {
+ reportOnValueArgument(context, it, ErrorsJvm.JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION)
+ }
+ }
+
+ private fun JetType.isJavaLangClassOrArray() = isJavaLangClass() ||
+ (KotlinBuiltIns.isArray(this) && getArguments().first().getType().isJavaLangClass())
+
+ private fun JetType.isJavaLangClass(): Boolean {
+ val classifier = getConstructor().getDeclarationDescriptor()
+
+ if (classifier !is ClassDescriptor) return false
+ return DescriptorUtils.isJavaLangClass(classifier)
+ }
+
+ private fun reportErrorsOnPositionedArguments(
+ resolvedCall: ResolvedCall<*>,
+ context: BasicCallResolutionContext
+ ) {
resolvedCall.getValueArguments().filter {
- p -> p.key.getName() != JvmAnnotationNames.DEFAULT_ANNOTATION_MEMBER_NAME &&
- p.value is ExpressionValueArgument &&
- !((p.value as ExpressionValueArgument).getValueArgument()?.isNamed() ?: true)
+ p ->
+ p.key.getName() != JvmAnnotationNames.DEFAULT_ANNOTATION_MEMBER_NAME &&
+ p.value is ExpressionValueArgument &&
+ !((p.value as ExpressionValueArgument).getValueArgument()?.isNamed() ?: true)
}.forEach {
- context.trace.report(
- ErrorsJvm.POSITIONED_VALUE_ARGUMENT_FOR_JAVA_ANNOTATION.on(
- it.getValue().getArguments().first().getArgumentExpression()))
+ reportOnValueArgument(context, it, ErrorsJvm.POSITIONED_VALUE_ARGUMENT_FOR_JAVA_ANNOTATION)
+ }
+ }
+
+ private fun reportOnValueArgument(
+ context: BasicCallResolutionContext,
+ argument: Map.Entry<ValueParameterDescriptor, ResolvedValueArgument>,
+ diagnostic: DiagnosticFactory0<JetExpression>
+ ) {
+ argument.getValue().getArguments().forEach {
+ if (it.getArgumentExpression() != null) {
+ context.trace.report(
+ diagnostic.on(
+ it.getArgumentExpression()
+ )
+ )
+ }
}
}
}
diff --git a/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/DefaultErrorMessagesJvm.java b/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/DefaultErrorMessagesJvm.java
index 92550e0b3e788..4d72b7482cc13 100644
--- a/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/DefaultErrorMessagesJvm.java
+++ b/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/DefaultErrorMessagesJvm.java
@@ -57,6 +57,7 @@ public String render(@NotNull ConflictingJvmDeclarationsData data) {
MAP.put(ErrorsJvm.NATIVE_DECLARATION_CANNOT_BE_INLINED, "Members of traits can not be inlined");
MAP.put(ErrorsJvm.POSITIONED_VALUE_ARGUMENT_FOR_JAVA_ANNOTATION, "Only named arguments are available for Java annotations");
+ MAP.put(ErrorsJvm.JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION, "Usage of `javaClass<T>()` in annotations is deprecated. Use T::class instead");
MAP.put(ErrorsJvm.NO_REFLECTION_IN_CLASS_PATH, "Expression ''{0}'' uses reflection which is not found in compilation classpath. Make sure you have kotlin-reflect.jar in the classpath", Renderers.ELEMENT_TEXT);
diff --git a/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/ErrorsJvm.java b/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/ErrorsJvm.java
index d28078e6e6b4c..c0b5ec2f51d48 100644
--- a/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/ErrorsJvm.java
+++ b/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/ErrorsJvm.java
@@ -52,6 +52,7 @@ public interface ErrorsJvm {
DiagnosticFactory0<JetDeclaration> NATIVE_DECLARATION_CANNOT_BE_INLINED = DiagnosticFactory0.create(ERROR, DECLARATION_SIGNATURE);
DiagnosticFactory0<JetExpression> POSITIONED_VALUE_ARGUMENT_FOR_JAVA_ANNOTATION = DiagnosticFactory0.create(ERROR);
+ DiagnosticFactory0<JetExpression> JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION = DiagnosticFactory0.create(WARNING);
// TODO: make this a warning
DiagnosticFactory1<JetExpression, JetExpression> NO_REFLECTION_IN_CLASS_PATH = DiagnosticFactory1.create(ERROR);
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/AnnotationResolver.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/AnnotationResolver.java
index f5b310d1a9ffc..4792eeee08129 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/AnnotationResolver.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/AnnotationResolver.java
@@ -306,7 +306,7 @@ private static void checkCompileTimeConstant(
if (descriptor != null && DescriptorUtils.isEnumClass(descriptor)) {
trace.report(Errors.ANNOTATION_PARAMETER_MUST_BE_ENUM_CONST.on(argumentExpression));
}
- else if (descriptor instanceof ClassDescriptor && CompileTimeConstantUtils.isJavaLangClass((ClassDescriptor) descriptor)) {
+ else if (descriptor instanceof ClassDescriptor && DescriptorUtils.isJavaLangClass((ClassDescriptor) descriptor)) {
trace.report(Errors.ANNOTATION_PARAMETER_MUST_BE_CLASS_LITERAL.on(argumentExpression));
}
else if (descriptor instanceof ClassDescriptor && KotlinBuiltIns.isKClass((ClassDescriptor) descriptor)) {
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/CompileTimeConstantUtils.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/CompileTimeConstantUtils.java
index 2187d6d074bbf..aae2f988c6cf0 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/CompileTimeConstantUtils.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/CompileTimeConstantUtils.java
@@ -78,7 +78,7 @@ private static boolean isAcceptableTypeForAnnotationParameter(@NotNull JetType p
KotlinBuiltIns builtIns = KotlinBuiltIns.getInstance();
if (isEnumClass(typeDescriptor) ||
isAnnotationClass(typeDescriptor) ||
- isJavaLangClass(typeDescriptor) ||
+ DescriptorUtils.isJavaLangClass(typeDescriptor) ||
KotlinBuiltIns.isKClass(typeDescriptor) ||
KotlinBuiltIns.isPrimitiveArray(parameterType) ||
KotlinBuiltIns.isPrimitiveType(parameterType) ||
@@ -97,7 +97,7 @@ private static boolean isAcceptableTypeForAnnotationParameter(@NotNull JetType p
if (arrayTypeDescriptor != null) {
return isEnumClass(arrayTypeDescriptor) ||
isAnnotationClass(arrayTypeDescriptor) ||
- isJavaLangClass(arrayTypeDescriptor) ||
+ DescriptorUtils.isJavaLangClass(arrayTypeDescriptor) ||
KotlinBuiltIns.isKClass(arrayTypeDescriptor) ||
builtIns.getStringType().equals(arrayType);
}
@@ -127,10 +127,6 @@ public static boolean isJavaClassMethodCall(@NotNull ResolvedCall<?> resolvedCal
return "kotlin.javaClass.function".equals(getIntrinsicAnnotationArgument(resolvedCall.getResultingDescriptor().getOriginal()));
}
- public static boolean isJavaLangClass(ClassDescriptor descriptor) {
- return "java.lang.Class".equals(DescriptorUtils.getFqName(descriptor).asString());
- }
-
public static boolean canBeReducedToBooleanConstant(
@Nullable JetExpression expression,
@NotNull BindingTrace trace,
diff --git a/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArgumentError.kt b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArgumentError.kt
new file mode 100644
index 0000000000000..f1e76b1d440f2
--- /dev/null
+++ b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArgumentError.kt
@@ -0,0 +1,17 @@
+// FILE: A.java
+public @interface A {
+ Class<?>[] value();
+}
+
+// FILE: b.kt
+val jClass = javaClass<String>()
+A(
+ <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION, ANNOTATION_PARAMETER_MUST_BE_CLASS_LITERAL!>jClass<!>,
+ <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<Int>()<!>
+)
+class MyClass1
+
+A(
+ <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<<!UNRESOLVED_REFERENCE!>Err<!>>()<!>,
+ <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<String>()<!>
+) class MyClass2
diff --git a/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArgumentError.txt b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArgumentError.txt
new file mode 100644
index 0000000000000..3fb2f0ce6ac25
--- /dev/null
+++ b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArgumentError.txt
@@ -0,0 +1,26 @@
+package
+
+internal val jClass: java.lang.Class<kotlin.String>
+
+public final annotation class A : kotlin.Annotation {
+ public /*synthesized*/ constructor A(/*0*/ vararg value: java.lang.Class<*> /*kotlin.Array<out java.lang.Class<*>>*/)
+ public constructor A(/*0*/ vararg value: kotlin.reflect.KClass<*> /*kotlin.Array<out kotlin.reflect.KClass<*>>*/)
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+ public abstract fun value(): kotlin.Array<kotlin.reflect.KClass<*>>
+}
+
+A(value = {javaClass<kotlin.Int>()}: kotlin.Array<out java.lang.Class<*>>) internal final class MyClass1 {
+ public constructor MyClass1()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+A(value = {javaClass<[ERROR : Err]>(), javaClass<kotlin.String>()}: kotlin.Array<out java.lang.Class<*>>) internal final class MyClass2 {
+ public constructor MyClass2()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
diff --git a/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArrayInAnnotations.kt b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArrayInAnnotations.kt
new file mode 100644
index 0000000000000..ab3eab07cbc76
--- /dev/null
+++ b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArrayInAnnotations.kt
@@ -0,0 +1,26 @@
+// FILE: A.java
+public @interface A {
+ Class<?>[] value() default {Integer.class};
+ Class<?>[] arg() default {String.class};
+}
+
+// FILE: b.kt
+A(<!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<Int>()<!>,
+<!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<Any>()<!>,
+arg = <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>array(javaClass<String>(), javaClass<Double>())<!>)
+class MyClass1
+
+A(<!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<Int>()<!>, <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<Any>()<!>)
+class MyClass2
+
+A(arg = <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>array(javaClass<String>(), javaClass<Double>())<!>)
+class MyClass3
+
+A class MyClass4
+
+A(value = *<!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>array(javaClass<Int>(), javaClass<Any>())<!>,
+arg = <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>array(javaClass<String>(), javaClass<Double>())<!>)
+class MyClass5
+
+A(value = *<!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>array(javaClass<Int>(), javaClass<Any>())<!>)
+class MyClass6
diff --git a/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArrayInAnnotations.txt b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArrayInAnnotations.txt
new file mode 100644
index 0000000000000..5f97cf407f09b
--- /dev/null
+++ b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArrayInAnnotations.txt
@@ -0,0 +1,53 @@
+package
+
+public final annotation class A : kotlin.Annotation {
+ public /*synthesized*/ constructor A(/*0*/ vararg value: java.lang.Class<*> /*kotlin.Array<out java.lang.Class<*>>*/ = ..., /*1*/ arg: kotlin.Array<out java.lang.Class<*>> = ...)
+ public constructor A(/*0*/ vararg value: kotlin.reflect.KClass<*> /*kotlin.Array<out kotlin.reflect.KClass<*>>*/ = ..., /*1*/ arg: kotlin.Array<out kotlin.reflect.KClass<*>> = ...)
+ public abstract fun arg(): kotlin.Array<kotlin.reflect.KClass<*>>
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+ public abstract fun value(): kotlin.Array<kotlin.reflect.KClass<*>>
+}
+
+A(arg = {javaClass<kotlin.String>(), javaClass<kotlin.Double>()}: kotlin.Array<java.lang.Class<out kotlin.Comparable<out kotlin.Any?>>>, value = {javaClass<kotlin.Int>(), javaClass<kotlin.Any>()}: kotlin.Array<out java.lang.Class<*>>) internal final class MyClass1 {
+ public constructor MyClass1()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+A(value = {javaClass<kotlin.Int>(), javaClass<kotlin.Any>()}: kotlin.Array<out java.lang.Class<*>>) internal final class MyClass2 {
+ public constructor MyClass2()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+A(arg = {javaClass<kotlin.String>(), javaClass<kotlin.Double>()}: kotlin.Array<java.lang.Class<out kotlin.Comparable<out kotlin.Any?>>>) internal final class MyClass3 {
+ public constructor MyClass3()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+A() internal final class MyClass4 {
+ public constructor MyClass4()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+A(arg = {javaClass<kotlin.String>(), javaClass<kotlin.Double>()}: kotlin.Array<java.lang.Class<out kotlin.Comparable<out kotlin.Any?>>>, value = {javaClass<kotlin.Int>(), javaClass<kotlin.Any>()}: kotlin.Array<java.lang.Class<out kotlin.Any>>) internal final class MyClass5 {
+ public constructor MyClass5()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+A(value = {javaClass<kotlin.Int>(), javaClass<kotlin.Any>()}: kotlin.Array<java.lang.Class<out kotlin.Any>>) internal final class MyClass6 {
+ public constructor MyClass6()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
diff --git a/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassInAnnotations.kt b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassInAnnotations.kt
new file mode 100644
index 0000000000000..4a7adf7db8045
--- /dev/null
+++ b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassInAnnotations.kt
@@ -0,0 +1,14 @@
+// FILE: A.java
+public @interface A {
+ Class<?> value() default Integer.class;
+ Class<?> arg() default String.class;
+}
+
+// FILE: b.kt
+A(<!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<Int>()<!>, arg = <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<String>()<!>) class MyClass1
+A(<!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<Int>()<!>) class MyClass2
+A(arg = <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<String>()<!>) class MyClass3
+A class MyClass4
+
+A(value = <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<Int>()<!>, arg = <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<String>()<!>) class MyClass5
+A(value = <!JAVA_LANG_CLASS_ARGUMENT_IN_ANNOTATION!>javaClass<Int>()<!>) class MyClass6
diff --git a/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassInAnnotations.txt b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassInAnnotations.txt
new file mode 100644
index 0000000000000..5318be6a8f630
--- /dev/null
+++ b/compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassInAnnotations.txt
@@ -0,0 +1,53 @@
+package
+
+public final annotation class A : kotlin.Annotation {
+ public /*synthesized*/ constructor A(/*0*/ value: java.lang.Class<*> = ..., /*1*/ arg: java.lang.Class<*> = ...)
+ public constructor A(/*0*/ value: kotlin.reflect.KClass<*> = ..., /*1*/ arg: kotlin.reflect.KClass<*> = ...)
+ public abstract fun arg(): kotlin.reflect.KClass<*>
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+ public abstract fun value(): kotlin.reflect.KClass<*>
+}
+
+A(arg = javaClass<kotlin.String>(): java.lang.Class<kotlin.String>, value = javaClass<kotlin.Int>(): java.lang.Class<kotlin.Int>) internal final class MyClass1 {
+ public constructor MyClass1()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+A(value = javaClass<kotlin.Int>(): java.lang.Class<kotlin.Int>) internal final class MyClass2 {
+ public constructor MyClass2()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+A(arg = javaClass<kotlin.String>(): java.lang.Class<kotlin.String>) internal final class MyClass3 {
+ public constructor MyClass3()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+A() internal final class MyClass4 {
+ public constructor MyClass4()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+A(arg = javaClass<kotlin.String>(): java.lang.Class<kotlin.String>, value = javaClass<kotlin.Int>(): java.lang.Class<kotlin.Int>) internal final class MyClass5 {
+ public constructor MyClass5()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+A(value = javaClass<kotlin.Int>(): java.lang.Class<kotlin.Int>) internal final class MyClass6 {
+ public constructor MyClass6()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
diff --git a/compiler/tests/org/jetbrains/kotlin/checkers/JetDiagnosticsTestWithStdLibGenerated.java b/compiler/tests/org/jetbrains/kotlin/checkers/JetDiagnosticsTestWithStdLibGenerated.java
index 16ff8282609fd..72ad06d7a15dc 100644
--- a/compiler/tests/org/jetbrains/kotlin/checkers/JetDiagnosticsTestWithStdLibGenerated.java
+++ b/compiler/tests/org/jetbrains/kotlin/checkers/JetDiagnosticsTestWithStdLibGenerated.java
@@ -117,6 +117,24 @@ public void testAllFilesPresentInAnnotationParameters() throws Exception {
JetTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters"), Pattern.compile("^(.+)\\.kt$"), true);
}
+ @TestMetadata("javaClassArgumentError.kt")
+ public void testJavaClassArgumentError() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArgumentError.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("javaClassArrayInAnnotations.kt")
+ public void testJavaClassArrayInAnnotations() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassArrayInAnnotations.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("javaClassInAnnotations.kt")
+ public void testJavaClassInAnnotations() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/javaClassInAnnotations.kt");
+ doTest(fileName);
+ }
+
@TestMetadata("orderWithValue.kt")
public void testOrderWithValue() throws Exception {
String fileName = JetTestUtils.navigationMetadata("compiler/testData/diagnostics/testsWithStdLib/annotations/annotationParameters/orderWithValue.kt");
diff --git a/core/descriptors/src/org/jetbrains/kotlin/resolve/DescriptorUtils.java b/core/descriptors/src/org/jetbrains/kotlin/resolve/DescriptorUtils.java
index 275c7047a0a35..2b2a107fef7ca 100644
--- a/core/descriptors/src/org/jetbrains/kotlin/resolve/DescriptorUtils.java
+++ b/core/descriptors/src/org/jetbrains/kotlin/resolve/DescriptorUtils.java
@@ -532,4 +532,8 @@ private static void getSubPackagesFqNames(PackageViewDescriptor packageView, Set
}
}
}
+
+ public static boolean isJavaLangClass(ClassDescriptor descriptor) {
+ return "java.lang.Class".equals(getFqName(descriptor).asString());
+ }
}
|
875c9d62c6db32aca16201c369d972bcf4a38dbd
|
hadoop
|
HADOOP-6441. Protect web ui from cross site- scripting attacks (XSS) on the host http header and using encoded utf-7.- (omalley)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@891132 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/CHANGES.txt b/CHANGES.txt
index b9b935d2d137d..dca7ac29771d4 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1222,6 +1222,9 @@ Release 0.21.0 - Unreleased
HADOOP-6375. Sync documentation for FsShell du with its implementation.
(Todd Lipcon via cdouglas)
+ HADOOP-6441. Protect web ui from cross site scripting attacks (XSS) on
+ the host http header and using encoded utf-7. (omalley)
+
Release 0.20.2 - Unreleased
NEW FEATURES
diff --git a/src/java/org/apache/hadoop/http/HttpServer.java b/src/java/org/apache/hadoop/http/HttpServer.java
index 0257141b8e3b5..4123923abb395 100644
--- a/src/java/org/apache/hadoop/http/HttpServer.java
+++ b/src/java/org/apache/hadoop/http/HttpServer.java
@@ -624,6 +624,25 @@ public Map<String, String[]> getParameterMap() {
}
return result;
}
+
+ /**
+ * Quote the url so that users specifying the HOST HTTP header
+ * can't inject attacks.
+ */
+ @Override
+ public StringBuffer getRequestURL(){
+ String url = rawRequest.getRequestURL().toString();
+ return new StringBuffer(HtmlQuoting.quoteHtmlChars(url));
+ }
+
+ /**
+ * Quote the server name so that users specifying the HOST HTTP header
+ * can't inject attacks.
+ */
+ @Override
+ public String getServerName() {
+ return HtmlQuoting.quoteHtmlChars(rawRequest.getServerName());
+ }
}
@Override
@@ -641,6 +660,10 @@ public void doFilter(ServletRequest request,
) throws IOException, ServletException {
HttpServletRequestWrapper quoted =
new RequestQuoter((HttpServletRequest) request);
+ final HttpServletResponse httpResponse = (HttpServletResponse) response;
+ // set the default to UTF-8 so that we don't need to worry about IE7
+ // choosing to interpret the special characters as UTF-7
+ httpResponse.setContentType("text/html;charset=utf-8");
chain.doFilter(quoted, response);
}
|
a4c5847e42fcba0f109de1a3d312d94588249e66
|
camel
|
Fix compile error--git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@707028 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/model/HandledPredicate.java b/camel-core/src/main/java/org/apache/camel/model/HandledPredicate.java
index 6df5ac5bc42c0..b3aff26d228f9 100644
--- a/camel-core/src/main/java/org/apache/camel/model/HandledPredicate.java
+++ b/camel-core/src/main/java/org/apache/camel/model/HandledPredicate.java
@@ -61,7 +61,7 @@ public void setPredicate(Predicate predicate) {
}
public Predicate createPredicate(RouteContext routeContext) {
- ExpressionType predicateType = getCompletePredicate();
+ ExpressionType predicateType = getHandledPredicate();
if (predicateType != null && predicate == null) {
predicate = predicateType.createPredicate(routeContext);
}
|
8800bab8a660f5ab9e2b100cee51af8a462d220c
|
spring-framework
|
DataSourceUtils lets timeout exceptions through- even for setReadOnly calls (revised; SPR-7226)--
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.jdbc/src/main/java/org/springframework/jdbc/datasource/DataSourceTransactionManager.java b/org.springframework.jdbc/src/main/java/org/springframework/jdbc/datasource/DataSourceTransactionManager.java
index 73612fe5a3b4..5bf57f3e3691 100644
--- a/org.springframework.jdbc/src/main/java/org/springframework/jdbc/datasource/DataSourceTransactionManager.java
+++ b/org.springframework.jdbc/src/main/java/org/springframework/jdbc/datasource/DataSourceTransactionManager.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2009 the original author or authors.
+ * Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,7 +18,6 @@
import java.sql.Connection;
import java.sql.SQLException;
-
import javax.sql.DataSource;
import org.springframework.beans.factory.InitializingBean;
@@ -236,7 +235,7 @@ protected void doBegin(Object transaction, TransactionDefinition definition) {
}
}
- catch (SQLException ex) {
+ catch (Exception ex) {
DataSourceUtils.releaseConnection(con, this.dataSource);
throw new CannotCreateTransactionException("Could not open JDBC Connection for transaction", ex);
}
diff --git a/org.springframework.jdbc/src/main/java/org/springframework/jdbc/datasource/DataSourceUtils.java b/org.springframework.jdbc/src/main/java/org/springframework/jdbc/datasource/DataSourceUtils.java
index bb79f43d87e1..d6c57f9aee9f 100644
--- a/org.springframework.jdbc/src/main/java/org/springframework/jdbc/datasource/DataSourceUtils.java
+++ b/org.springframework.jdbc/src/main/java/org/springframework/jdbc/datasource/DataSourceUtils.java
@@ -154,14 +154,28 @@ public static Integer prepareConnectionForTransaction(Connection con, Transactio
}
con.setReadOnly(true);
}
- catch (Throwable ex) {
- if (ex instanceof SQLException && (ex.getClass().getSimpleName().contains("Timeout") ||
- (ex.getCause() != null && ex.getCause().getClass().getSimpleName().contains("Timeout")))) {
- // Assume it's a connection timeout that would otherwise get lost: e.g. from C3P0.
- throw (SQLException) ex;
+ catch (SQLException ex) {
+ Throwable exToCheck = ex;
+ while (exToCheck != null) {
+ if (exToCheck.getClass().getSimpleName().contains("Timeout")) {
+ // Assume it's a connection timeout that would otherwise get lost: e.g. from JDBC 4.0
+ throw ex;
+ }
+ exToCheck = exToCheck.getCause();
}
- // "read-only not supported" SQLException or UnsupportedOperationException
- // -> ignore, it's just a hint anyway.
+ // "read-only not supported" SQLException -> ignore, it's just a hint anyway
+ logger.debug("Could not set JDBC Connection read-only", ex);
+ }
+ catch (RuntimeException ex) {
+ Throwable exToCheck = ex;
+ while (exToCheck != null) {
+ if (exToCheck.getClass().getSimpleName().contains("Timeout")) {
+ // Assume it's a connection timeout that would otherwise get lost: e.g. from Hibernate
+ throw ex;
+ }
+ exToCheck = exToCheck.getCause();
+ }
+ // "read-only not supported" UnsupportedOperationException -> ignore, it's just a hint anyway
logger.debug("Could not set JDBC Connection read-only", ex);
}
}
|
5f00dc61b87eb4af734446d3a7b1c8928cf57e5d
|
camel
|
CAMEL-1933: Overhaul of JMX. Added managed send- to.Prepared for camel/route context runtime configuration to be managed.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@808777 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/Route.java b/camel-core/src/main/java/org/apache/camel/Route.java
index 0539773fda209..c7bdc8d77f1a8 100644
--- a/camel-core/src/main/java/org/apache/camel/Route.java
+++ b/camel-core/src/main/java/org/apache/camel/Route.java
@@ -19,6 +19,8 @@
import java.util.List;
import java.util.Map;
+import org.apache.camel.spi.RouteContext;
+
public interface Route {
String ID_PROPERTY = "id";
@@ -44,6 +46,13 @@ public interface Route {
*/
Map<String, Object> getProperties();
+ /**
+ * Gets the route context
+ *
+ * @return the route context
+ */
+ RouteContext getRouteContext();
+
/**
* This property map is used to associate information about
* the route. Gets all the services for this routes
diff --git a/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java b/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
index 9f5db318fad28..f7763ca816839 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
@@ -591,7 +591,7 @@ public ServiceStatus getRouteStatus(String key) {
}
public void startRoute(RouteDefinition route) throws Exception {
- Collection<Route> routes = new ArrayList<Route>();
+ List<Route> routes = new ArrayList<Route>();
List<RouteContext> routeContexts = route.addRoutes(this, routes);
RouteService routeService = new RouteService(this, route, routeContexts, routes);
startRouteService(routeService);
diff --git a/camel-core/src/main/java/org/apache/camel/impl/DefaultRoute.java b/camel-core/src/main/java/org/apache/camel/impl/DefaultRoute.java
index e41d4caadc171..f89e34eed2ea0 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/DefaultRoute.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/DefaultRoute.java
@@ -24,6 +24,7 @@
import org.apache.camel.Endpoint;
import org.apache.camel.Route;
import org.apache.camel.Service;
+import org.apache.camel.spi.RouteContext;
/**
* A <a href="http://camel.apache.org/routes.html">Route</a>
@@ -37,13 +38,15 @@ public abstract class DefaultRoute extends ServiceSupport implements Route {
private final Endpoint endpoint;
private final Map<String, Object> properties = new HashMap<String, Object>();
private final List<Service> services = new ArrayList<Service>();
+ private final RouteContext routeContext;
- public DefaultRoute(Endpoint endpoint) {
+ public DefaultRoute(RouteContext routeContext, Endpoint endpoint) {
+ this.routeContext = routeContext;
this.endpoint = endpoint;
}
- public DefaultRoute(Endpoint endpoint, Service... services) {
- this(endpoint);
+ public DefaultRoute(RouteContext routeContext, Endpoint endpoint, Service... services) {
+ this(routeContext, endpoint);
for (Service service : services) {
addService(service);
}
@@ -62,6 +65,10 @@ public Endpoint getEndpoint() {
return endpoint;
}
+ public RouteContext getRouteContext() {
+ return routeContext;
+ }
+
public Map<String, Object> getProperties() {
return properties;
}
diff --git a/camel-core/src/main/java/org/apache/camel/impl/DefaultRouteContext.java b/camel-core/src/main/java/org/apache/camel/impl/DefaultRouteContext.java
index 84e3aedaa8005..7c25394510b85 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/DefaultRouteContext.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/DefaultRouteContext.java
@@ -145,7 +145,7 @@ public void commit() {
wrapper.setProcessor(unitOfWorkProcessor);
// and create the route that wraps the UoW
- Route edcr = new EventDrivenConsumerRoute(getEndpoint(), wrapper);
+ Route edcr = new EventDrivenConsumerRoute(this, getEndpoint(), wrapper);
edcr.getProperties().put(Route.ID_PROPERTY, route.idOrCreate(getCamelContext().getNodeIdFactory()));
edcr.getProperties().put(Route.PARENT_PROPERTY, Integer.toHexString(route.hashCode()));
if (route.getGroup() != null) {
diff --git a/camel-core/src/main/java/org/apache/camel/impl/EventDrivenConsumerRoute.java b/camel-core/src/main/java/org/apache/camel/impl/EventDrivenConsumerRoute.java
index cf491137a8f6c..1f4807d2a01c0 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/EventDrivenConsumerRoute.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/EventDrivenConsumerRoute.java
@@ -23,6 +23,7 @@
import org.apache.camel.Navigate;
import org.apache.camel.Processor;
import org.apache.camel.Service;
+import org.apache.camel.spi.RouteContext;
import org.apache.camel.management.InstrumentationProcessor;
/**
@@ -34,8 +35,8 @@
public class EventDrivenConsumerRoute extends DefaultRoute {
private final Processor processor;
- public EventDrivenConsumerRoute(Endpoint endpoint, Processor processor) {
- super(endpoint);
+ public EventDrivenConsumerRoute(RouteContext routeContext, Endpoint endpoint, Processor processor) {
+ super(routeContext, endpoint);
this.processor = processor;
}
diff --git a/camel-core/src/main/java/org/apache/camel/impl/RouteService.java b/camel-core/src/main/java/org/apache/camel/impl/RouteService.java
index abdbf3f6789cd..a7820d308f241 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/RouteService.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/RouteService.java
@@ -45,10 +45,10 @@ public class RouteService extends ServiceSupport {
private final DefaultCamelContext camelContext;
private final RouteDefinition routeDefinition;
private final List<RouteContext> routeContexts;
- private final Collection<Route> routes;
+ private final List<Route> routes;
private final String id;
- public RouteService(DefaultCamelContext camelContext, RouteDefinition routeDefinition, List<RouteContext> routeContexts, Collection<Route> routes) {
+ public RouteService(DefaultCamelContext camelContext, RouteDefinition routeDefinition, List<RouteContext> routeContexts, List<Route> routes) {
this.camelContext = camelContext;
this.routeDefinition = routeDefinition;
this.routeContexts = routeContexts;
diff --git a/camel-core/src/main/java/org/apache/camel/management/DefaultManagedLifecycleStrategy.java b/camel-core/src/main/java/org/apache/camel/management/DefaultManagedLifecycleStrategy.java
index 0894a1a811864..083fec9cfb2d4 100644
--- a/camel-core/src/main/java/org/apache/camel/management/DefaultManagedLifecycleStrategy.java
+++ b/camel-core/src/main/java/org/apache/camel/management/DefaultManagedLifecycleStrategy.java
@@ -43,6 +43,7 @@
import org.apache.camel.management.mbean.ManagedProcessor;
import org.apache.camel.management.mbean.ManagedProducer;
import org.apache.camel.management.mbean.ManagedRoute;
+import org.apache.camel.management.mbean.ManagedSendProcessor;
import org.apache.camel.management.mbean.ManagedThrottler;
import org.apache.camel.model.AOPDefinition;
import org.apache.camel.model.InterceptDefinition;
@@ -51,6 +52,7 @@
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.processor.Delayer;
+import org.apache.camel.processor.SendProcessor;
import org.apache.camel.processor.Throttler;
import org.apache.camel.spi.BrowsableEndpoint;
import org.apache.camel.spi.ClassResolver;
@@ -316,10 +318,10 @@ private Object createManagedObjectForProcessor(CamelContext context, Processor p
return new ManagedDelayer(context, (Delayer) processor, definition);
} else if (processor instanceof Throttler) {
return new ManagedThrottler(context, (Throttler) processor, definition);
+ } else if (processor instanceof SendProcessor) {
+ return new ManagedSendProcessor(context, (SendProcessor) processor, definition);
}
- // TODO Add more specialized support for processors such as SendTo, WireTap etc.
-
// fallback to a generic processor
return new ManagedProcessor(context, processor, definition);
}
diff --git a/camel-core/src/main/java/org/apache/camel/management/mbean/ManagedSendProcessor.java b/camel-core/src/main/java/org/apache/camel/management/mbean/ManagedSendProcessor.java
new file mode 100644
index 0000000000000..739ef4d8f7f18
--- /dev/null
+++ b/camel-core/src/main/java/org/apache/camel/management/mbean/ManagedSendProcessor.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.management.mbean;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.Endpoint;
+import org.apache.camel.model.ProcessorDefinition;
+import org.apache.camel.processor.SendProcessor;
+import org.springframework.jmx.export.annotation.ManagedAttribute;
+import org.springframework.jmx.export.annotation.ManagedOperation;
+import org.springframework.jmx.export.annotation.ManagedResource;
+
+/**
+ * @version $Revision$
+ */
+@ManagedResource(description = "Managed SendProcessor")
+public class ManagedSendProcessor extends ManagedProcessor {
+
+ private SendProcessor processor;
+
+ public ManagedSendProcessor(CamelContext context, SendProcessor processor, ProcessorDefinition definition) {
+ super(context, processor, definition);
+ this.processor = processor;
+ }
+
+ public SendProcessor getProcessor() {
+ return processor;
+ }
+
+ @ManagedAttribute(description = "Destination as Endpoint Uri")
+ public String getDestination() {
+ return processor.getDestination().getEndpointUri();
+ }
+
+ @ManagedAttribute(description = "Message Exchange Pattern")
+ public String getMessageExchangePattern() {
+ if (processor.getPattern() != null) {
+ return processor.getPattern().name();
+ } else {
+ return null;
+ }
+ }
+
+ @ManagedOperation(description = "Change Destination Endpoint Uri")
+ public void changeDestination(String uri) throws Exception {
+ Endpoint endpoint = getContext().getEndpoint(uri);
+ processor.setDestination(endpoint);
+ }
+
+}
diff --git a/camel-core/src/main/java/org/apache/camel/processor/SendProcessor.java b/camel-core/src/main/java/org/apache/camel/processor/SendProcessor.java
index 8a9491d4d84e1..20c587b5d0f77 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/SendProcessor.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/SendProcessor.java
@@ -56,6 +56,11 @@ public String toString() {
return "sendTo(" + destination + (pattern != null ? " " + pattern : "") + ")";
}
+ public synchronized void setDestination(Endpoint destination) {
+ this.destination = destination;
+ this.init = false;
+ }
+
public String getTraceLabel() {
return destination.getEndpointUri();
}
@@ -97,6 +102,10 @@ public Endpoint getDestination() {
return destination;
}
+ public ExchangePattern getPattern() {
+ return pattern;
+ }
+
protected Exchange configureExchange(Exchange exchange, ExchangePattern pattern) {
if (pattern != null) {
exchange.setPattern(pattern);
diff --git a/camel-core/src/test/java/org/apache/camel/management/ManagedSendProcessorTest.java b/camel-core/src/test/java/org/apache/camel/management/ManagedSendProcessorTest.java
new file mode 100644
index 0000000000000..306efa9b9d191
--- /dev/null
+++ b/camel-core/src/test/java/org/apache/camel/management/ManagedSendProcessorTest.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.management;
+
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.ContextTestSupport;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.component.mock.MockEndpoint;
+
+/**
+ * @version $Revision$
+ */
+public class ManagedSendProcessorTest extends ContextTestSupport {
+
+ @Override
+ protected CamelContext createCamelContext() throws Exception {
+ CamelContext context = super.createCamelContext();
+ DefaultManagementNamingStrategy naming = (DefaultManagementNamingStrategy) context.getManagementStrategy().getManagementNamingStrategy();
+ naming.setHostName("localhost");
+ naming.setDomainName("org.apache.camel");
+ return context;
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testManageSendProcessor() throws Exception {
+ MockEndpoint result = getMockEndpoint("mock:result");
+ result.expectedMessageCount(1);
+ MockEndpoint foo = getMockEndpoint("mock:foo");
+ foo.expectedMessageCount(0);
+
+ template.sendBody("direct:start", "Hello World");
+
+ assertMockEndpointsSatisfied();
+
+ // get the stats for the route
+ MBeanServer mbeanServer = context.getManagementStrategy().getManagementAgent().getMBeanServer();
+
+ // get the object name for the delayer
+ ObjectName on = ObjectName.getInstance("org.apache.camel:context=localhost/camel-1,type=processors,name=\"mysend\"");
+
+ // send it somewhere else
+ mbeanServer.invoke(on, "changeDestination", new Object[]{"direct:foo"}, new String[]{"java.lang.String"});
+
+ // prepare mocks
+ result.reset();
+ result.expectedMessageCount(0);
+ foo.reset();
+ foo.expectedMessageCount(1);
+
+ // send in another message that should be sent to mock:foo
+ template.sendBody("direct:start", "Bye World");
+
+ assertMockEndpointsSatisfied();
+ }
+
+ @Override
+ protected RouteBuilder createRouteBuilder() throws Exception {
+ return new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ from("direct:start")
+ .to("mock:result").id("mysend");
+
+ from("direct:foo").to("mock:foo");
+ }
+ };
+ }
+
+}
|
8467928cf0639c9783a5f03168af547ffe8a23c8
|
camel
|
CAMEL-4023 Properties to Cxf ClientProxyFactory- can be set on endpoint uri or CxfEndpoint bean.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1128561 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/camel
|
diff --git a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfComponent.java b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfComponent.java
index 5133229a23c88..f7bfe4b9b5713 100644
--- a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfComponent.java
+++ b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfComponent.java
@@ -88,6 +88,8 @@ protected Endpoint createEndpoint(String uri, String remaining,
Map<String, Object> properties = IntrospectionSupport.extractProperties(parameters, "properties.");
if (properties != null) {
result.setProperties(properties);
+ // set the properties of MTOM
+ result.setMtomEnabled(Boolean.valueOf((String)properties.get(Message.MTOM_ENABLED)));
}
return result;
diff --git a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfEndpoint.java b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfEndpoint.java
index 10f70ea88739c..d66ae60c8076a 100644
--- a/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfEndpoint.java
+++ b/components/camel-cxf/src/main/java/org/apache/camel/component/cxf/CxfEndpoint.java
@@ -188,12 +188,12 @@ protected void setupServerFactoryBean(ServerFactoryBean sfb, Class<?> cls) {
}
// any optional properties
- if (properties != null) {
+ if (getProperties() != null) {
if (sfb.getProperties() != null) {
// add to existing properties
- sfb.getProperties().putAll(properties);
+ sfb.getProperties().putAll(getProperties());
} else {
- sfb.setProperties(properties);
+ sfb.setProperties(getProperties());
}
LOG.debug("ServerFactoryBean: {} added properties: {}", sfb, properties);
}
@@ -299,6 +299,17 @@ protected void setupClientFactoryBean(ClientProxyFactoryBean factoryBean, Class<
factoryBean.getServiceFactory().setWrapped(getWrappedStyle());
}
+ // set the properties on CxfProxyFactoryBean
+ if (getProperties() != null) {
+ if (factoryBean.getProperties() != null) {
+ // add to existing properties
+ factoryBean.getProperties().putAll(getProperties());
+ } else {
+ factoryBean.setProperties(getProperties());
+ }
+ LOG.debug("ClientProxyFactoryBean: {} added properties: {}", factoryBean, properties);
+ }
+
factoryBean.setBus(getBus());
}
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java
index 7fce8263687f6..cf69d4ee9b6b4 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java
@@ -36,7 +36,8 @@ public class CxfJavaOnlyPayloadModeTest extends CamelTestSupport {
+ "?wsdlURL=classpath:person.wsdl"
+ "&serviceName={http://camel.apache.org/wsdl-first}PersonService"
+ "&portName={http://camel.apache.org/wsdl-first}soap"
- + "&dataFormat=PAYLOAD";
+ + "&dataFormat=PAYLOAD"
+ + "&properties.exceptionMessageCauseEnabled=true&properties.faultStackTraceEnabled=true";
@Test
public void testCxfJavaOnly() throws Exception {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/mtom/CxfMtomConsumerTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/mtom/CxfMtomConsumerTest.java
index 71c0d77041751..275412bfb2883 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/mtom/CxfMtomConsumerTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/mtom/CxfMtomConsumerTest.java
@@ -40,7 +40,7 @@
public class CxfMtomConsumerTest extends CamelTestSupport {
protected static final String MTOM_ENDPOINT_ADDRESS = "http://localhost:9091/jaxws-mtom/hello";
protected static final String MTOM_ENDPOINT_URI = "cxf://" + MTOM_ENDPOINT_ADDRESS
- + "?serviceClass=org.apache.camel.component.cxf.HelloImpl";
+ + "?serviceClass=org.apache.camel.cxf.mtom_feature.Hello";
private final QName serviceName = new QName("http://apache.org/camel/cxf/mtom_feature", "HelloService");
@@ -83,12 +83,12 @@ private Hello getPort() {
return service.getHelloPort();
}
- private Image getImage(String name) throws Exception {
+ protected Image getImage(String name) throws Exception {
return ImageIO.read(getClass().getResource(name));
}
@Test
- public void testInvokingServiceFromCXFClient() throws Exception {
+ public void testInvokingService() throws Exception {
if (Boolean.getBoolean("java.awt.headless")
|| System.getProperty("os.name").startsWith("Mac OS") && System.getProperty("user.name").equals("cruise")) {
|
8dc78ca7af53778f2e156e575551a3bcab1d1f7b
|
intellij-community
|
IDEA-70843 (fix Hector slider for GTK+ L&F)--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/lang-impl/src/com/intellij/codeInsight/daemon/impl/HectorComponent.java b/platform/lang-impl/src/com/intellij/codeInsight/daemon/impl/HectorComponent.java
index 1792a1f8a9272..93ea6d6eb88e8 100644
--- a/platform/lang-impl/src/com/intellij/codeInsight/daemon/impl/HectorComponent.java
+++ b/platform/lang-impl/src/com/intellij/codeInsight/daemon/impl/HectorComponent.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2000-2009 JetBrains s.r.o.
+ * Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -50,6 +50,7 @@
import javax.swing.event.ChangeListener;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
+import javax.swing.plaf.basic.BasicSliderUI;
import java.awt.*;
import java.lang.ref.WeakReference;
import java.util.*;
@@ -82,6 +83,7 @@ public HectorComponent(PsiFile file) {
final FileViewProvider viewProvider = myFile.getViewProvider();
final Set<Language> languages = viewProvider.getLanguages();
for (Language language : languages) {
+ @SuppressWarnings("UseOfObsoleteCollectionType")
final Hashtable<Integer, JLabel> sliderLabels = new Hashtable<Integer, JLabel>();
sliderLabels.put(1, new JLabel(EditorBundle.message("hector.none.slider.label")));
sliderLabels.put(2, new JLabel(EditorBundle.message("hector.syntax.slider.label")));
@@ -90,9 +92,13 @@ public HectorComponent(PsiFile file) {
}
final JSlider slider = new JSlider(SwingConstants.VERTICAL, 1, notInLibrary ? 3 : 2, 1);
+ if (UIUtil.isUnderGTKLookAndFeel()) {
+ // default GTK+ slider UI is way too ugly
+ slider.putClientProperty("Slider.paintThumbArrowShape", true);
+ slider.setUI(new BasicSliderUI(slider));
+ }
slider.setLabelTable(sliderLabels);
- final boolean value = true;
- UIUtil.setSliderIsFilled(slider, value);
+ UIUtil.setSliderIsFilled(slider, true);
slider.setPaintLabels(true);
slider.setSnapToTicks(true);
slider.addChangeListener(new ChangeListener() {
@@ -104,13 +110,13 @@ public void stateChanged(ChangeEvent e) {
}
}
});
+
final PsiFile psiRoot = viewProvider.getPsi(language);
+ assert psiRoot != null : "No root in " + viewProvider + " for " + language;
slider.setValue(getValue(HighlightLevelUtil.shouldHighlight(psiRoot), HighlightLevelUtil.shouldInspect(psiRoot)));
mySliders.put(language, slider);
}
- final DaemonCodeAnalyzer analyzer = DaemonCodeAnalyzer.getInstance(myFile.getProject());
-
GridBagConstraints gc = new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 0, 0, GridBagConstraints.NORTHWEST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0);
@@ -249,6 +255,7 @@ private void forceDaemonRestart() {
for (Language language : mySliders.keySet()) {
JSlider slider = mySliders.get(language);
PsiElement root = viewProvider.getPsi(language);
+ assert root != null : "No root in " + viewProvider + " for " + language;
int value = slider.getValue();
if (value == 1) {
HighlightLevelUtil.forceRootHighlighting(root, FileHighlighingSetting.SKIP_HIGHLIGHTING);
@@ -269,7 +276,7 @@ private boolean isModified() {
for (Language language : mySliders.keySet()) {
JSlider slider = mySliders.get(language);
final PsiFile root = viewProvider.getPsi(language);
- if (getValue(HighlightLevelUtil.shouldHighlight(root), HighlightLevelUtil.shouldInspect(root)) != slider.getValue()) {
+ if (root != null && getValue(HighlightLevelUtil.shouldHighlight(root), HighlightLevelUtil.shouldInspect(root)) != slider.getValue()) {
return true;
}
}
|
b1cbaf9f3a43ea9bb9ef31578fb5c765cccf7fdb
|
orientdb
|
Fixed issue on delete when the database is not- yet opened--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
index 9f197546990..ee35f636843 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
@@ -280,9 +280,10 @@ public void close() {
public void delete() {
final long timer = OProfiler.getInstance().startChrono();
- // CLOSE THE DATABASE BY REMOVING THE CURRENT USER
- if (removeUser() > 0)
- throw new OStorageException("Can't delete a storage open");
+ if (!isClosed())
+ // CLOSE THE DATABASE BY REMOVING THE CURRENT USER
+ if (removeUser() > 0)
+ throw new OStorageException("Can't delete a storage open");
// GET REAL DIRECTORY
File dbDir = new File(url);
@@ -325,7 +326,7 @@ public void delete() {
}
}
- throw new OStorageException("Can't delete database '" + name + "' located in: " + dbDir);
+ throw new OStorageException("Can't delete database '" + name + "' located in: " + dbDir + ". Database files seems locked");
} finally {
releaseExclusiveLock(locked);
@@ -1014,9 +1015,9 @@ protected int updateRecord(final int iRequesterId, final OCluster iClusterSegmen
throw new OConcurrentModificationException(
"Can't update record #"
+ recId
- + " because it has been modified by another user (v."
+ + " because it has been modified by another user (v"
+ ppos.version
- + " != v."
+ + " != v"
+ iVersion
+ ") in the meanwhile of current transaction. Use pessimistic locking instead of optimistic or simply re-execute the transaction");
|
07165bab64bd1d62bd5eee176f084dd9317a97b8
|
camel
|
corrected typo.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1331967 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/test/java/org/apache/camel/issues/SingleOutputInMulticastIssueTest.java b/camel-core/src/test/java/org/apache/camel/issues/SingleOutputInMulticastIssueTest.java
index 831b17715441d..6aa8164be28c5 100644
--- a/camel-core/src/test/java/org/apache/camel/issues/SingleOutputInMulticastIssueTest.java
+++ b/camel-core/src/test/java/org/apache/camel/issues/SingleOutputInMulticastIssueTest.java
@@ -26,7 +26,7 @@ public class SingleOutputInMulticastIssueTest extends ContextTestSupport {
public void testSingleOutputInMulticastIssue() throws Exception {
getMockEndpoint("mock:error").expectedMessageCount(1);
- getMockEndpoint("mock:auit").expectedMessageCount(0);
+ getMockEndpoint("mock:audit").expectedMessageCount(0);
template.sendBody("direct:start", "Hello World");
|
0c82fc5901c4ca81ff9226f703cf575194c92948
|
elasticsearch
|
Remove Infinity values for Range facets when no- docs match the range, closes -1366.--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/facet/range/InternalRangeFacet.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/facet/range/InternalRangeFacet.java
index 47d55af929c9e..722de608992ee 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/facet/range/InternalRangeFacet.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/facet/range/InternalRangeFacet.java
@@ -180,8 +180,11 @@ static final class Fields {
builder.field(Fields.TO_STR, entry.toAsString);
}
builder.field(Fields.COUNT, entry.count());
- builder.field(Fields.MIN, entry.min());
- builder.field(Fields.MAX, entry.max());
+ // only output min and max if there are actually documents matching this range...
+ if (entry.totalCount() > 0) {
+ builder.field(Fields.MIN, entry.min());
+ builder.field(Fields.MAX, entry.max());
+ }
builder.field(Fields.TOTAL_COUNT, entry.totalCount());
builder.field(Fields.TOTAL, entry.total());
builder.field(Fields.MEAN, entry.mean());
|
f119ea70438e3ff3d72678086ce4c58441a445a7
|
orientdb
|
GraphDB: supported 3 locking modes: no locking,- database level locking (default) and record level locking--
|
a
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/graph/OGraphDatabase.java b/core/src/main/java/com/orientechnologies/orient/core/db/graph/OGraphDatabase.java
index e318c7141d9..564fcd31ea6 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/db/graph/OGraphDatabase.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/graph/OGraphDatabase.java
@@ -32,6 +32,7 @@
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
+import com.orientechnologies.orient.core.storage.OStorage;
import com.orientechnologies.orient.core.storage.OStorageEmbedded;
import com.orientechnologies.orient.core.tx.OTransactionNoTx;
import com.orientechnologies.orient.core.type.tree.OMVRBTreeRIDSet;
@@ -47,6 +48,10 @@
*
*/
public class OGraphDatabase extends ODatabaseDocumentTx {
+ public enum LOCK_MODE {
+ NO_LOCKING, DATABASE_LEVEL_LOCKING, RECORD_LEVEL_LOCKING
+ }
+
public static final String TYPE = "graph";
public static final String VERTEX_CLASS_NAME = "OGraphVertex";
@@ -62,7 +67,7 @@ public class OGraphDatabase extends ODatabaseDocumentTx {
private boolean useCustomTypes = true;
private boolean safeMode = false;
- private boolean autoLock = true;
+ private LOCK_MODE lockMode = LOCK_MODE.DATABASE_LEVEL_LOCKING;
protected OClass vertexBaseClass;
protected OClass edgeBaseClass;
@@ -80,11 +85,6 @@ public OGraphDatabase(final ODatabaseRecordTx iSource) {
public <THISDB extends ODatabase> THISDB open(final String iUserName, final String iUserPassword) {
super.open(iUserName, iUserPassword);
checkForGraphSchema();
-
- if (autoLock && !(getStorage() instanceof OStorageEmbedded))
- // NOT YET SUPPORETD REMOTE LOCKING
- autoLock = false;
-
return (THISDB) this;
}
@@ -218,10 +218,9 @@ public ODocument createEdge(final ODocument iOutVertex, final ODocument iInVerte
edge.field(iFields[i].toString(), iFields[i + 1]);
// OUT FIELD
- if (autoLock)
- // LOCK VERTEX TO AVOID CONCURRENT ACCESS
- acquireWriteLock(iOutVertex);
+ acquireWriteLock(iOutVertex);
try {
+
final Object outField = iOutVertex.field(VERTEX_FIELD_OUT);
final OMVRBTreeRIDSet out;
if (outField instanceof OMVRBTreeRIDSet) {
@@ -235,15 +234,13 @@ public ODocument createEdge(final ODocument iOutVertex, final ODocument iInVerte
}
out.add(edge);
} finally {
- if (autoLock)
- releaseWriteLock(iOutVertex);
+ releaseWriteLock(iOutVertex);
}
// IN FIELD
- if (autoLock)
- // LOCK VERTEX TO AVOID CONCURRENT ACCESS
- acquireWriteLock(iInVertex);
+ acquireWriteLock(iInVertex);
try {
+
final Object inField = iInVertex.field(VERTEX_FIELD_IN);
final OMVRBTreeRIDSet in;
if (inField instanceof OMVRBTreeRIDSet) {
@@ -256,9 +253,9 @@ public ODocument createEdge(final ODocument iOutVertex, final ODocument iInVerte
iInVertex.field(VERTEX_FIELD_IN, in);
}
in.add(edge);
+
} finally {
- if (autoLock)
- releaseWriteLock(iInVertex);
+ releaseWriteLock(iInVertex);
}
edge.setDirty();
@@ -290,8 +287,7 @@ public boolean removeEdge(final OIdentifiable iEdge) {
// OUT VERTEX
final ODocument outVertex = edge.field(EDGE_FIELD_OUT);
- if (autoLock)
- acquireWriteLock(outVertex);
+ acquireWriteLock(outVertex);
try {
if (outVertex != null) {
@@ -302,15 +298,13 @@ public boolean removeEdge(final OIdentifiable iEdge) {
}
} finally {
- if (autoLock)
- releaseWriteLock(outVertex);
+ releaseWriteLock(outVertex);
}
// IN VERTEX
final ODocument inVertex = edge.field(EDGE_FIELD_IN);
- if (autoLock)
- acquireWriteLock(inVertex);
+ acquireWriteLock(inVertex);
try {
if (inVertex != null) {
@@ -321,8 +315,7 @@ public boolean removeEdge(final OIdentifiable iEdge) {
}
} finally {
- if (autoLock)
- releaseWriteLock(inVertex);
+ releaseWriteLock(inVertex);
}
delete(edge);
@@ -344,10 +337,9 @@ public void removeVertex(final ODocument iVertex) {
Set<ODocument> otherEdges;
// REMOVE OUT EDGES
- if (autoLock)
- // LOCK VERTEX TO AVOID CONCURRENT ACCESS
- acquireWriteLock(iVertex);
+ acquireWriteLock(iVertex);
try {
+
Set<ODocument> edges = iVertex.field(VERTEX_FIELD_OUT);
if (edges != null) {
for (ODocument edge : edges) {
@@ -383,8 +375,7 @@ public void removeVertex(final ODocument iVertex) {
delete(iVertex);
} finally {
- if (autoLock)
- releaseWriteLock(iVertex);
+ releaseWriteLock(iVertex);
}
commitBlock(safeMode);
@@ -444,9 +435,7 @@ public Set<OIdentifiable> getEdgesBetweenVertexes(final OIdentifiable iVertex1,
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
if (iVertex1 != null && iVertex2 != null) {
- if (autoLock)
- // LOCK VERTEX TO AVOID CONCURRENT ACCESS
- acquireReadLock(iVertex1);
+ acquireReadLock(iVertex1);
try {
// CHECK OUT EDGES
@@ -472,8 +461,7 @@ public Set<OIdentifiable> getEdgesBetweenVertexes(final OIdentifiable iVertex1,
}
} finally {
- if (autoLock)
- releaseReadLock(iVertex1);
+ releaseReadLock(iVertex1);
}
}
@@ -500,8 +488,7 @@ public Set<OIdentifiable> getOutEdges(final OIdentifiable iVertex, final String
OMVRBTreeRIDSet result = null;
- if (autoLock)
- acquireReadLock(iVertex);
+ acquireReadLock(iVertex);
try {
final OMVRBTreeRIDSet set = vertex.field(VERTEX_FIELD_OUT);
@@ -522,8 +509,7 @@ public Set<OIdentifiable> getOutEdges(final OIdentifiable iVertex, final String
}
} finally {
- if (autoLock)
- releaseReadLock(iVertex);
+ releaseReadLock(iVertex);
}
return result;
@@ -571,8 +557,7 @@ public Set<OIdentifiable> getInEdges(final OIdentifiable iVertex, final String i
OMVRBTreeRIDSet result = null;
- if (autoLock)
- acquireReadLock(iVertex);
+ acquireReadLock(iVertex);
try {
final OMVRBTreeRIDSet set = vertex.field(VERTEX_FIELD_IN);
@@ -593,8 +578,7 @@ public Set<OIdentifiable> getInEdges(final OIdentifiable iVertex, final String i
}
} finally {
- if (autoLock)
- releaseReadLock(iVertex);
+ releaseReadLock(iVertex);
}
return result;
}
@@ -664,8 +648,7 @@ public ODocument getOutVertex(final OIdentifiable iEdge) {
}
public Set<OIdentifiable> filterEdgesByProperties(final OMVRBTreeRIDSet iEdges, final Iterable<String> iPropertyNames) {
- if (autoLock)
- acquireReadLock(null);
+ acquireReadLock(null);
try {
if (iPropertyNames == null)
@@ -690,14 +673,12 @@ public Set<OIdentifiable> filterEdgesByProperties(final OMVRBTreeRIDSet iEdges,
return result;
} finally {
- if (autoLock)
- releaseReadLock(null);
+ releaseReadLock(null);
}
}
public Set<OIdentifiable> filterEdgesByProperties(final OMVRBTreeRIDSet iEdges, final Map<String, Object> iProperties) {
- if (autoLock)
- acquireReadLock(null);
+ acquireReadLock(null);
try {
if (iProperties == null)
@@ -727,8 +708,7 @@ public Set<OIdentifiable> filterEdgesByProperties(final OMVRBTreeRIDSet iEdges,
return result;
} finally {
- if (autoLock)
- releaseReadLock(null);
+ releaseReadLock(null);
}
}
@@ -886,14 +866,6 @@ public boolean isEdge(final ODocument iRecord) {
return iRecord != null ? iRecord.getSchemaClass().isSubClassOf(edgeBaseClass) : false;
}
- public boolean isAutoLock() {
- return autoLock;
- }
-
- public void setAutoLock(final boolean iAutoLock) {
- this.autoLock = iAutoLock;
- }
-
/**
* Locks the record in exclusive mode to avoid concurrent access.
*
@@ -902,8 +874,16 @@ public void setAutoLock(final boolean iAutoLock) {
* @return The current instance as fluent interface to allow calls in chain.
*/
public OGraphDatabase acquireWriteLock(final OIdentifiable iRecord) {
- ((OStorageEmbedded) getStorage()).getLock().acquireExclusiveLock();
- // ((OStorageEmbedded) getStorage()).acquireWriteLock(iRecord.getIdentity());
+ switch (lockMode) {
+ case DATABASE_LEVEL_LOCKING:
+ ((OStorage) getStorage()).getLock().acquireExclusiveLock();
+ break;
+ case RECORD_LEVEL_LOCKING:
+ ((OStorageEmbedded) getStorage()).acquireWriteLock(iRecord.getIdentity());
+ break;
+ case NO_LOCKING:
+ break;
+ }
return this;
}
@@ -915,8 +895,16 @@ public OGraphDatabase acquireWriteLock(final OIdentifiable iRecord) {
* @return The current instance as fluent interface to allow calls in chain.
*/
public OGraphDatabase releaseWriteLock(final OIdentifiable iRecord) {
- ((OStorageEmbedded) getStorage()).getLock().releaseExclusiveLock();
- // ((OStorageEmbedded) getStorage()).releaseWriteLock(iRecord.getIdentity());
+ switch (lockMode) {
+ case DATABASE_LEVEL_LOCKING:
+ ((OStorage) getStorage()).getLock().releaseExclusiveLock();
+ break;
+ case RECORD_LEVEL_LOCKING:
+ ((OStorageEmbedded) getStorage()).releaseWriteLock(iRecord.getIdentity());
+ break;
+ case NO_LOCKING:
+ break;
+ }
return this;
}
@@ -928,8 +916,16 @@ public OGraphDatabase releaseWriteLock(final OIdentifiable iRecord) {
* @return The current instance as fluent interface to allow calls in chain.
*/
public OGraphDatabase acquireReadLock(final OIdentifiable iRecord) {
- ((OStorageEmbedded) getStorage()).getLock().acquireSharedLock();
- // ((OStorageEmbedded) getStorage()).acquireReadLock(iRecord.getIdentity());
+ switch (lockMode) {
+ case DATABASE_LEVEL_LOCKING:
+ ((OStorage) getStorage()).getLock().acquireSharedLock();
+ break;
+ case RECORD_LEVEL_LOCKING:
+ ((OStorageEmbedded) getStorage()).acquireReadLock(iRecord.getIdentity());
+ break;
+ case NO_LOCKING:
+ break;
+ }
return this;
}
@@ -941,8 +937,16 @@ public OGraphDatabase acquireReadLock(final OIdentifiable iRecord) {
* @return The current instance as fluent interface to allow calls in chain.
*/
public OGraphDatabase releaseReadLock(final OIdentifiable iRecord) {
- ((OStorageEmbedded) getStorage()).getLock().releaseSharedLock();
- // ((OStorageEmbedded) getStorage()).releaseReadLock(iRecord.getIdentity());
+ switch (lockMode) {
+ case DATABASE_LEVEL_LOCKING:
+ ((OStorage) getStorage()).getLock().releaseSharedLock();
+ break;
+ case RECORD_LEVEL_LOCKING:
+ ((OStorageEmbedded) getStorage()).releaseReadLock(iRecord.getIdentity());
+ break;
+ case NO_LOCKING:
+ break;
+ }
return this;
}
@@ -1024,4 +1028,16 @@ protected boolean checkEdge(final ODocument iEdge, final String[] iLabels, final
}
return good;
}
+
+ public LOCK_MODE getLockMode() {
+ return lockMode;
+ }
+
+ public void setLockMode(final LOCK_MODE lockMode) {
+ if (lockMode == LOCK_MODE.RECORD_LEVEL_LOCKING && !(getStorage() instanceof OStorageEmbedded))
+ // NOT YET SUPPORETD REMOTE LOCKING
+ throw new IllegalArgumentException("Record leve locking is not supported for remote connections");
+
+ this.lockMode = lockMode;
+ }
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/OStorageEmbedded.java b/core/src/main/java/com/orientechnologies/orient/core/storage/OStorageEmbedded.java
index ed8bacc60ca..0c4bae3a7f5 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/OStorageEmbedded.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/OStorageEmbedded.java
@@ -17,6 +17,7 @@
import java.io.IOException;
+import com.orientechnologies.common.concur.lock.OLockManager.LOCK;
import com.orientechnologies.common.exception.OException;
import com.orientechnologies.orient.core.command.OCommandExecutor;
import com.orientechnologies.orient.core.command.OCommandManager;
@@ -52,10 +53,10 @@ public OStorageEmbedded(final String iName, final String iFilePath, final String
PROFILER_DELETE_RECORD = "db." + name + ".deleteRecord";
}
- protected abstract ORawBuffer readRecord(final OCluster iClusterSegment, final ORecordId iRid, boolean iAtomicLock);
-
public abstract OCluster getClusterByName(final String iClusterName);
+ protected abstract ORawBuffer readRecord(final OCluster iClusterSegment, final ORecordId iRid, boolean iAtomicLock);
+
/**
* Closes the storage freeing the lock manager first.
*/
@@ -94,14 +95,6 @@ public Object executeCommand(final OCommandRequestText iCommand, final OCommandE
}
}
- /**
- * Checks if the storage is open. If it's closed an exception is raised.
- */
- protected void checkOpeness() {
- if (status != STATUS.OPEN)
- throw new OStorageException("Storage " + name + " is not opened.");
- }
-
@Override
public long[] getClusterPositionsForEntry(int currentClusterId, long entry) {
if (currentClusterId == -1)
@@ -126,6 +119,22 @@ public long[] getClusterPositionsForEntry(int currentClusterId, long entry) {
}
}
+ public void acquireWriteLock(final ORID iRid) {
+ lockManager.acquireLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
+ }
+
+ public void releaseWriteLock(final ORID iRid) {
+ lockManager.releaseLock(Thread.currentThread(), iRid, LOCK.EXCLUSIVE);
+ }
+
+ public void acquireReadLock(final ORID iRid) {
+ lockManager.acquireLock(Thread.currentThread(), iRid, LOCK.SHARED);
+ }
+
+ public void releaseReadLock(final ORID iRid) {
+ lockManager.releaseLock(Thread.currentThread(), iRid, LOCK.SHARED);
+ }
+
protected OPhysicalPosition moveRecord(ORID originalId, ORID newId) throws IOException {
final OCluster originalCluster = getClusterById(originalId.getClusterId());
final OCluster destinationCluster = getClusterById(newId.getClusterId());
@@ -190,4 +199,12 @@ protected OPhysicalPosition moveRecord(ORID originalId, ORID newId) throws IOExc
return ppos;
}
+
+ /**
+ * Checks if the storage is open. If it's closed an exception is raised.
+ */
+ protected void checkOpeness() {
+ if (status != STATUS.OPEN)
+ throw new OStorageException("Storage " + name + " is not opened.");
+ }
}
|
146eb941d0babe160c0ee0e5f09600ce3d2f9145
|
kotlin
|
Config refactoring.--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/js/js.tests/test/org/jetbrains/k2js/test/TestConfig.java b/js/js.tests/test/org/jetbrains/k2js/config/TestConfig.java
similarity index 74%
rename from js/js.tests/test/org/jetbrains/k2js/test/TestConfig.java
rename to js/js.tests/test/org/jetbrains/k2js/config/TestConfig.java
index 189e4b8b049e8..c918ad287895b 100644
--- a/js/js.tests/test/org/jetbrains/k2js/test/TestConfig.java
+++ b/js/js.tests/test/org/jetbrains/k2js/config/TestConfig.java
@@ -14,14 +14,13 @@
* limitations under the License.
*/
-package org.jetbrains.k2js.test;
+package org.jetbrains.k2js.config;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jet.lang.psi.JetFile;
-import org.jetbrains.k2js.config.Config;
import org.jetbrains.k2js.utils.JetFileUtils;
import java.io.FileInputStream;
@@ -29,7 +28,6 @@
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
/**
@@ -38,38 +36,12 @@
//TODO: review/refactor
public final class TestConfig extends Config {
- @NotNull
- private static final List<String> LIB_FILE_NAMES = Arrays.asList(
- "/core/annotations.kt",
- "/jquery/common.kt",
- "/jquery/ui.kt",
- "/core/javautil.kt",
- "/core/javalang.kt",
- "/core/core.kt",
- "/core/math.kt",
- "/core/json.kt",
- "/raphael/raphael.kt",
- "/html5/canvas.kt",
- "/html5/files.kt",
- "/html5/image.kt"
- );
-
- private static final String LIBRARIES_LOCATION = "js.libraries/src";
@Nullable
private /*var*/ List<JetFile> jsLibFiles = null;
- @NotNull
- private final Project project;
-
public TestConfig(@NotNull Project project) {
- this.project = project;
- }
-
- @NotNull
- @Override
- public Project getProject() {
- return project;
+ super(project);
}
@NotNull
diff --git a/js/js.translator/src/org/jetbrains/k2js/config/Config.java b/js/js.translator/src/org/jetbrains/k2js/config/Config.java
index 9bef13e122061..269ac50498f7b 100644
--- a/js/js.translator/src/org/jetbrains/k2js/config/Config.java
+++ b/js/js.translator/src/org/jetbrains/k2js/config/Config.java
@@ -20,6 +20,7 @@
import org.jetbrains.annotations.NotNull;
import org.jetbrains.jet.lang.psi.JetFile;
+import java.util.Arrays;
import java.util.List;
/**
@@ -30,9 +31,35 @@
public abstract class Config {
@NotNull
- public abstract Project getProject();
+ protected static final List<String> LIB_FILE_NAMES = Arrays.asList(
+ "/core/annotations.kt",
+ "/jquery/common.kt",
+ "/jquery/ui.kt",
+ "/core/javautil.kt",
+ "/core/javalang.kt",
+ "/core/core.kt",
+ "/core/math.kt",
+ "/core/json.kt",
+ "/raphael/raphael.kt",
+ "/html5/canvas.kt",
+ "/html5/files.kt",
+ "/html5/image.kt"
+ );
+
+ protected static final String LIBRARIES_LOCATION = "js.libraries/src";
@NotNull
- public abstract List<JetFile> getLibFiles();
+ private final Project project;
+ public Config(@NotNull Project project) {
+ this.project = project;
+ }
+
+ @NotNull
+ public Project getProject() {
+ return project;
+ }
+
+ @NotNull
+ public abstract List<JetFile> getLibFiles();
}
diff --git a/js/js.translator/src/org/jetbrains/k2js/config/IDEAConfig.java b/js/js.translator/src/org/jetbrains/k2js/config/IDEAConfig.java
index b421601c2740e..6788dec178c73 100644
--- a/js/js.translator/src/org/jetbrains/k2js/config/IDEAConfig.java
+++ b/js/js.translator/src/org/jetbrains/k2js/config/IDEAConfig.java
@@ -28,17 +28,8 @@
*/
public final class IDEAConfig extends Config {
- @NotNull
- private final Project project;
-
public IDEAConfig(@NotNull Project project) {
- this.project = project;
- }
-
- @NotNull
- @Override
- public Project getProject() {
- return project;
+ super(project);
}
@NotNull
|
70bd7099ec082958928b821e89fbb7c76b1ed01c
|
ReactiveX-RxJava
|
Switched to a default scheduler that actually works- together with this operator.--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/operators/OperationInterval.java b/rxjava-core/src/main/java/rx/operators/OperationInterval.java
index 20e73a7521..0e03b7fe75 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationInterval.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationInterval.java
@@ -18,6 +18,7 @@
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;
+import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -45,7 +46,7 @@ public final class OperationInterval {
* Creates an event each time interval.
*/
public static Func1<Observer<Long>, Subscription> interval(long interval, TimeUnit unit) {
- return new Interval(interval, unit, Schedulers.currentThread());
+ return new Interval(interval, unit, Schedulers.executor(Executors.newSingleThreadScheduledExecutor()));
}
/**
|
8933947b9549f6bbf4dd81034a6d47a22acdf895
|
elasticsearch
|
only pull Fields once from the reader--
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java
index e61c221a95937..90f00fb293ce7 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionFieldStats.java
@@ -20,6 +20,8 @@
package org.elasticsearch.search.suggest.completion;
import com.carrotsearch.hppc.ObjectLongHashMap;
+
+import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
@@ -32,28 +34,35 @@
public class CompletionFieldStats {
- public static CompletionStats completionStats(IndexReader indexReader, String ... fields) {
+ /**
+ * Returns total in-heap bytes used by all suggesters. This method is <code>O(numIndexedFields)</code>.
+ *
+ * @param fieldNamePatterns if non-null, any completion field name matching any of these patterns will break out its in-heap bytes
+ * separately in the returned {@link CompletionStats}
+ */
+ public static CompletionStats completionStats(IndexReader indexReader, String ... fieldNamePatterns) {
long sizeInBytes = 0;
ObjectLongHashMap<String> completionFields = null;
- if (fields != null && fields.length > 0) {
- completionFields = new ObjectLongHashMap<>(fields.length);
+ if (fieldNamePatterns != null && fieldNamePatterns.length > 0) {
+ completionFields = new ObjectLongHashMap<>(fieldNamePatterns.length);
}
for (LeafReaderContext atomicReaderContext : indexReader.leaves()) {
LeafReader atomicReader = atomicReaderContext.reader();
try {
- for (String fieldName : atomicReader.fields()) {
- Terms terms = atomicReader.fields().terms(fieldName);
+ Fields fields = atomicReader.fields();
+ for (String fieldName : fields) {
+ Terms terms = fields.terms(fieldName);
if (terms instanceof CompletionTerms) {
// TODO: currently we load up the suggester for reporting its size
long fstSize = ((CompletionTerms) terms).suggester().ramBytesUsed();
- if (fields != null && fields.length > 0 && Regex.simpleMatch(fields, fieldName)) {
+ if (fieldNamePatterns != null && fieldNamePatterns.length > 0 && Regex.simpleMatch(fieldNamePatterns, fieldName)) {
completionFields.addTo(fieldName, fstSize);
}
sizeInBytes += fstSize;
}
}
- } catch (IOException ignored) {
- throw new ElasticsearchException(ignored);
+ } catch (IOException ioe) {
+ throw new ElasticsearchException(ioe);
}
}
return new CompletionStats(sizeInBytes, completionFields);
|
7c7e4a2e255eff4ee29992330b352a7665050834
|
camel
|
CAMEL-5906: Added new camel-servletlistener- component for bootstrapping Camel in web app without using spring etc. Work- in progress.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1428278 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/camel
|
diff --git a/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/CamelContextLifecycle.java b/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/CamelContextLifecycle.java
index 733c7b07d69fb..762c8f22c04a1 100644
--- a/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/CamelContextLifecycle.java
+++ b/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/CamelContextLifecycle.java
@@ -24,12 +24,40 @@
*/
public interface CamelContextLifecycle {
+ /**
+ * Callback before starting {@link ServletCamelContext}.
+ *
+ * @param camelContext the Camel context
+ * @param jndi the JNDI context.
+ * @throws Exception is thrown if any error.
+ */
void beforeStart(ServletCamelContext camelContext, JndiContext jndi) throws Exception;
+ /**
+ * Callback after {@link ServletCamelContext} has been started.
+ *
+ * @param camelContext the Camel context
+ * @param jndi the JNDI context.
+ * @throws Exception is thrown if any error.
+ */
void afterStart(ServletCamelContext camelContext, JndiContext jndi) throws Exception;
+ /**
+ * Callback before stopping {@link ServletCamelContext}.
+ *
+ * @param camelContext the Camel context
+ * @param jndi the JNDI context.
+ * @throws Exception is thrown if any error.
+ */
void beforeStop(ServletCamelContext camelContext, JndiContext jndi) throws Exception;
+ /**
+ * Callback after {@link ServletCamelContext} has been stopped.
+ *
+ * @param camelContext the Camel context
+ * @param jndi the JNDI context.
+ * @throws Exception is thrown if any error.
+ */
void afterStop(ServletCamelContext camelContext, JndiContext jndi) throws Exception;
}
diff --git a/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/CamelContextServletListener.java b/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/CamelContextServletListener.java
index 190662310c700..ab95c1e5f95bf 100644
--- a/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/CamelContextServletListener.java
+++ b/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/CamelContextServletListener.java
@@ -60,7 +60,6 @@ public class CamelContextServletListener implements ServletContextListener {
private boolean test;
@Override
- @SuppressWarnings("unchecked")
public void contextInitialized(ServletContextEvent sce) {
LOG.info("CamelContextServletListener initializing ...");
@@ -128,7 +127,7 @@ public void contextInitialized(ServletContextEvent sce) {
}
// any custom CamelContextLifecycle
- String lifecycle = (String) map.remove(CamelContextLifecycle.class.getName());
+ String lifecycle = (String) map.remove("CamelContextLifecycle");
if (lifecycle != null) {
try {
Class<CamelContextLifecycle> clazz = camelContext.getClassResolver().resolveMandatoryClass(lifecycle, CamelContextLifecycle.class);
@@ -164,6 +163,55 @@ public void contextInitialized(ServletContextEvent sce) {
LOG.info("CamelContextServletListener initialized");
}
+ @Override
+ public void contextDestroyed(ServletContextEvent sce) {
+ LOG.info("CamelContextServletListener destroying ...");
+ if (camelContext != null) {
+ try {
+ if (camelContextLifecycle != null) {
+ camelContextLifecycle.beforeStop(camelContext, jndiContext);
+ }
+ camelContext.stop();
+ if (camelContextLifecycle != null) {
+ camelContextLifecycle.afterStop(camelContext, jndiContext);
+ }
+ } catch (Exception e) {
+ LOG.warn("Error stopping CamelContext. This exception will be ignored.", e);
+ }
+ }
+ camelContext = null;
+ jndiContext = null;
+ instance = null;
+ LOG.info("CamelContextServletListener destroyed");
+ }
+
+ private Map<String, Object> extractInitParameters(ServletContextEvent sce) {
+ // configure CamelContext with the init parameter
+ Map<String, Object> map = new LinkedHashMap<String, Object>();
+ Enumeration names = sce.getServletContext().getInitParameterNames();
+ while (names.hasMoreElements()) {
+ String name = (String) names.nextElement();
+ String value = sce.getServletContext().getInitParameter(name);
+
+ if (ObjectHelper.isNotEmpty(value)) {
+ Object target = value;
+ if (value.startsWith("#")) {
+ // a reference lookup in jndi
+ value = value.substring(1);
+ target = lookupJndi(jndiContext, value);
+ }
+ map.put(name, target);
+ }
+ }
+ return map;
+ }
+
+ /**
+ * Extract the routes from the parameters.
+ *
+ * @param map parameters
+ * @return a list of routes, which can be of different types. See source code for more details.
+ */
private List<Object> extractRoutes(Map<String, Object> map) {
List<Object> answer = new ArrayList<Object>();
List<String> names = new ArrayList<String>();
@@ -237,27 +285,6 @@ private List<Object> extractRoutes(Map<String, Object> map) {
return answer;
}
- private Map<String, Object> extractInitParameters(ServletContextEvent sce) {
- // configure CamelContext with the init parameter
- Map<String, Object> map = new LinkedHashMap<String, Object>();
- Enumeration names = sce.getServletContext().getInitParameterNames();
- while (names.hasMoreElements()) {
- String name = (String) names.nextElement();
- String value = sce.getServletContext().getInitParameter(name);
-
- if (ObjectHelper.isNotEmpty(value)) {
- Object target = value;
- if (value.startsWith("#")) {
- // a reference lookup in jndi
- value = value.substring(1);
- target = lookupJndi(jndiContext, value);
- }
- map.put(name, target);
- }
- }
- return map;
- }
-
private static Object lookupJndi(JndiContext jndiContext, String name) {
try {
return jndiContext.lookup(name);
@@ -266,26 +293,4 @@ private static Object lookupJndi(JndiContext jndiContext, String name) {
}
}
- @Override
- public void contextDestroyed(ServletContextEvent sce) {
- LOG.info("CamelContextServletListener destroying ...");
- if (camelContext != null) {
- try {
- if (camelContextLifecycle != null) {
- camelContextLifecycle.beforeStop(camelContext, jndiContext);
- }
- camelContext.stop();
- if (camelContextLifecycle != null) {
- camelContextLifecycle.afterStop(camelContext, jndiContext);
- }
- } catch (Exception e) {
- LOG.warn("Error stopping CamelContext. This exception will be ignored.", e);
- }
- }
- camelContext = null;
- jndiContext = null;
- instance = null;
- LOG.info("CamelContextServletListener destroyed");
- }
-
}
diff --git a/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/ServletCamelContext.java b/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/ServletCamelContext.java
index ad7e95e1321a5..bb053086436f5 100644
--- a/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/ServletCamelContext.java
+++ b/components/camel-servletlistener/src/main/java/org/apache/camel/component/servletlistener/ServletCamelContext.java
@@ -26,10 +26,20 @@
*/
public class ServletCamelContext extends DefaultCamelContext {
+ private final Context jndiContext;
private final ServletContext servletContext;
public ServletCamelContext(Context jndiContext, ServletContext servletContext) {
super(jndiContext);
+ this.jndiContext = jndiContext;
this.servletContext = servletContext;
}
+
+ public Context getJndiContext() {
+ return jndiContext;
+ }
+
+ public ServletContext getServletContext() {
+ return servletContext;
+ }
}
diff --git a/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/HelloBean.java b/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/HelloBean.java
new file mode 100644
index 0000000000000..ef1ad32ce1780
--- /dev/null
+++ b/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/HelloBean.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.servletlistener;
+
+/**
+ *
+ */
+public class HelloBean {
+
+ public String hello(String name) {
+ return "Hello " + name;
+ }
+}
diff --git a/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/LifecycleTest.java b/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/LifecycleTest.java
new file mode 100644
index 0000000000000..6d547bf9520da
--- /dev/null
+++ b/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/LifecycleTest.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.servletlistener;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.ProducerTemplate;
+import org.apache.camel.component.mock.MockEndpoint;
+import org.junit.Test;
+
+/**
+ *
+ */
+public class LifecycleTest extends ServletCamelTestSupport {
+
+ protected String getConfiguration() {
+ return "/myweb5.xml";
+ }
+
+ @Test
+ public void testCamelContext() throws Exception {
+ CamelContext context = getCamelContext();
+ assertNotNull(context);
+
+ assertEquals("MyCamel", context.getName());
+
+ ProducerTemplate template = context.createProducerTemplate();
+
+ MockEndpoint mock = context.getEndpoint("mock:foo", MockEndpoint.class);
+ mock.expectedBodiesReceived("Hello World");
+
+ template.sendBody("seda:foo", "World");
+
+ mock.assertIsSatisfied();
+ template.stop();
+ }
+
+}
diff --git a/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/MyBeanRoute.java b/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/MyBeanRoute.java
new file mode 100644
index 0000000000000..2a91209a3602b
--- /dev/null
+++ b/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/MyBeanRoute.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.servletlistener;
+
+import org.apache.camel.builder.RouteBuilder;
+
+/**
+ *
+ */
+public class MyBeanRoute extends RouteBuilder {
+
+ @Override
+ public void configure() throws Exception {
+ from("seda:foo").routeId("foo")
+ .to("bean:myBean")
+ .to("mock:foo");
+ }
+}
diff --git a/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/MyLifecycle.java b/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/MyLifecycle.java
new file mode 100644
index 0000000000000..386b5f1a6a118
--- /dev/null
+++ b/components/camel-servletlistener/src/test/java/org/apache/camel/component/servletlistener/MyLifecycle.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.servletlistener;
+
+import org.apache.camel.util.jndi.JndiContext;
+
+/**
+ *
+ */
+// START SNIPPET: e1
+public class MyLifecycle extends CamelContextLifecycleSupport {
+
+ @Override
+ public void beforeStart(ServletCamelContext camelContext, JndiContext jndi) throws Exception {
+ // enlist our bean(s) in the registry
+ jndi.bind("myBean", new HelloBean());
+ }
+}
+// END SNIPPET: e1
diff --git a/components/camel-servletlistener/src/test/resources/myweb5.xml b/components/camel-servletlistener/src/test/resources/myweb5.xml
new file mode 100644
index 0000000000000..004effb519af9
--- /dev/null
+++ b/components/camel-servletlistener/src/test/resources/myweb5.xml
@@ -0,0 +1,57 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+
+<!DOCTYPE web-app
+ PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
+ "http://java.sun.com/dtd/web-app_2_3.dtd">
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+<!-- START SNIPPET: web -->
+<web-app>
+
+ <!-- the test parameter is only to be used for unit testing -->
+ <context-param>
+ <param-name>test</param-name>
+ <param-value>true</param-value>
+ </context-param>
+
+ <!-- you can configure any of the properties on CamelContext, eg setName will be configured as below -->
+ <context-param>
+ <param-name>name</param-name>
+ <param-value>MyCamel</param-value>
+ </context-param>
+
+ <!-- configure a route builder to use -->
+ <!-- Camel will pickup any parameter names that start with routeBuilder (case ignored) -->
+ <context-param>
+ <param-name>routeBuilder-MyRoute</param-name>
+ <param-value>org.apache.camel.component.servletlistener.MyBeanRoute</param-value>
+ </context-param>
+ <!-- configure our lifecycle class, which allows us to do custom logic before/after starting CamelContext -->
+ <context-param>
+ <param-name>CamelContextLifecycle</param-name>
+ <param-value>org.apache.camel.component.servletlistener.MyLifecycle</param-value>
+ </context-param>
+
+ <listener>
+ <listener-class>org.apache.camel.component.servletlistener.CamelContextServletListener</listener-class>
+ </listener>
+
+</web-app>
+<!-- END SNIPPET: web -->
\ No newline at end of file
|
9ef72b22e87757ff7ff3a499cb01bb6c771e2f2e
|
orientdb
|
Fixed bug on management of indexes with the- introducing of new ODistributedStorage--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/build.number b/build.number
index b3a22ef5cc0..b776f13db09 100644
--- a/build.number
+++ b/build.number
@@ -1,3 +1,3 @@
#Build Number for ANT. Do not edit!
-#Thu Jul 05 12:04:44 CEST 2012
-build.number=12170
+#Thu Jul 05 13:56:06 CEST 2012
+build.number=12174
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java
index 7d888263e3c..4ff55451e75 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java
@@ -61,7 +61,6 @@
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.storage.ORawBuffer;
import com.orientechnologies.orient.core.storage.ORecordCallback;
-import com.orientechnologies.orient.core.storage.OStorageEmbedded;
import com.orientechnologies.orient.core.storage.OStorageProxy;
import com.orientechnologies.orient.core.tx.OTransactionRealAbstract;
import com.orientechnologies.orient.core.type.tree.provider.OMVRBTreeRIDProvider;
@@ -163,7 +162,7 @@ public <DB extends ODatabase> DB create() {
getStorage().getConfiguration().update();
- if (getStorage() instanceof OStorageEmbedded) {
+ if (!(getStorage() instanceof OStorageProxy)) {
registerHook(new OUserTrigger());
registerHook(new OClassIndexManager());
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadata.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadata.java
index fcda6cce341..536fc2459b6 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadata.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadata.java
@@ -34,129 +34,129 @@
import com.orientechnologies.orient.core.metadata.security.OSecurityProxy;
import com.orientechnologies.orient.core.metadata.security.OSecurityShared;
import com.orientechnologies.orient.core.storage.OStorage;
-import com.orientechnologies.orient.core.storage.OStorageEmbedded;
+import com.orientechnologies.orient.core.storage.OStorageProxy;
public class OMetadata {
- protected int schemaClusterId;
-
- protected OSchemaProxy schema;
- protected OSecurity security;
- protected OIndexManagerProxy indexManager;
-
- public OMetadata() {
- }
-
- public void load() {
- final long timer = OProfiler.getInstance().startChrono();
-
- try {
- init(true);
-
- if (schemaClusterId == -1 || getDatabase().countClusterElements(OStorage.CLUSTER_INTERNAL_NAME) == 0)
- return;
- } finally {
- OProfiler.getInstance().stopChrono("OMetadata.load", timer);
- }
- }
-
- public void create() throws IOException {
- final long timer = OProfiler.getInstance().startChrono();
-
- try {
- init(false);
-
- security.create();
- schema.create();
- indexManager.create();
- } finally {
- OProfiler.getInstance().stopChrono("OMetadata.load", timer);
- }
- }
-
- public OSchema getSchema() {
- return schema;
- }
-
- public OSecurity getSecurity() {
- return security;
- }
-
- public OIndexManagerProxy getIndexManager() {
- return indexManager;
- }
-
- public int getSchemaClusterId() {
- return schemaClusterId;
- }
-
- private void init(final boolean iLoad) {
- final ODatabaseRecord database = getDatabase();
- schemaClusterId = database.getClusterIdByName(OStorage.CLUSTER_INTERNAL_NAME);
-
- indexManager = new OIndexManagerProxy(database.getStorage().getResource(OIndexManager.class.getSimpleName(),
- new Callable<OIndexManager>() {
- public OIndexManager call() {
- OIndexManager instance;
- if (database.getStorage() instanceof OStorageEmbedded)
- instance = new OIndexManagerShared(database);
- else
- instance = new OIndexManagerRemote(database);
-
- if (iLoad)
- instance.load();
-
- return instance;
- }
- }), database);
-
- schema = new OSchemaProxy(database.getStorage().getResource(OSchema.class.getSimpleName(), new Callable<OSchemaShared>() {
- public OSchemaShared call() {
- final OSchemaShared instance = new OSchemaShared(schemaClusterId);
- if (iLoad)
- instance.load();
- return instance;
- }
- }), database);
-
- final Boolean enableSecurity = (Boolean) database.getProperty(ODatabase.OPTIONS.SECURITY.toString());
- if (enableSecurity != null && !enableSecurity)
- // INSTALL NO SECURITY IMPL
- security = new OSecurityNull();
- else
- security = new OSecurityProxy(database.getStorage().getResource(OSecurity.class.getSimpleName(),
- new Callable<OSecurityShared>() {
- public OSecurityShared call() {
- final OSecurityShared instance = new OSecurityShared();
- if (iLoad)
- instance.load();
- return instance;
- }
- }), database);
-
- }
-
- /**
- * Reloads the internal objects.
- */
- public void reload() {
- schema.reload();
- indexManager.load();
- security.load();
- }
-
- /**
- * Closes internal objects
- */
- public void close() {
- if (indexManager != null)
- indexManager.flush();
- if (schema != null)
- schema.close();
- if (security != null)
- security.close();
- }
-
- protected ODatabaseRecord getDatabase() {
- return ODatabaseRecordThreadLocal.INSTANCE.get();
- }
+ protected int schemaClusterId;
+
+ protected OSchemaProxy schema;
+ protected OSecurity security;
+ protected OIndexManagerProxy indexManager;
+
+ public OMetadata() {
+ }
+
+ public void load() {
+ final long timer = OProfiler.getInstance().startChrono();
+
+ try {
+ init(true);
+
+ if (schemaClusterId == -1 || getDatabase().countClusterElements(OStorage.CLUSTER_INTERNAL_NAME) == 0)
+ return;
+ } finally {
+ OProfiler.getInstance().stopChrono("OMetadata.load", timer);
+ }
+ }
+
+ public void create() throws IOException {
+ final long timer = OProfiler.getInstance().startChrono();
+
+ try {
+ init(false);
+
+ security.create();
+ schema.create();
+ indexManager.create();
+ } finally {
+ OProfiler.getInstance().stopChrono("OMetadata.load", timer);
+ }
+ }
+
+ public OSchema getSchema() {
+ return schema;
+ }
+
+ public OSecurity getSecurity() {
+ return security;
+ }
+
+ public OIndexManagerProxy getIndexManager() {
+ return indexManager;
+ }
+
+ public int getSchemaClusterId() {
+ return schemaClusterId;
+ }
+
+ private void init(final boolean iLoad) {
+ final ODatabaseRecord database = getDatabase();
+ schemaClusterId = database.getClusterIdByName(OStorage.CLUSTER_INTERNAL_NAME);
+
+ indexManager = new OIndexManagerProxy(database.getStorage().getResource(OIndexManager.class.getSimpleName(),
+ new Callable<OIndexManager>() {
+ public OIndexManager call() {
+ OIndexManager instance;
+ if (database.getStorage() instanceof OStorageProxy)
+ instance = new OIndexManagerRemote(database);
+ else
+ instance = new OIndexManagerShared(database);
+
+ if (iLoad)
+ instance.load();
+
+ return instance;
+ }
+ }), database);
+
+ schema = new OSchemaProxy(database.getStorage().getResource(OSchema.class.getSimpleName(), new Callable<OSchemaShared>() {
+ public OSchemaShared call() {
+ final OSchemaShared instance = new OSchemaShared(schemaClusterId);
+ if (iLoad)
+ instance.load();
+ return instance;
+ }
+ }), database);
+
+ final Boolean enableSecurity = (Boolean) database.getProperty(ODatabase.OPTIONS.SECURITY.toString());
+ if (enableSecurity != null && !enableSecurity)
+ // INSTALL NO SECURITY IMPL
+ security = new OSecurityNull();
+ else
+ security = new OSecurityProxy(database.getStorage().getResource(OSecurity.class.getSimpleName(),
+ new Callable<OSecurityShared>() {
+ public OSecurityShared call() {
+ final OSecurityShared instance = new OSecurityShared();
+ if (iLoad)
+ instance.load();
+ return instance;
+ }
+ }), database);
+
+ }
+
+ /**
+ * Reloads the internal objects.
+ */
+ public void reload() {
+ schema.reload();
+ indexManager.load();
+ security.load();
+ }
+
+ /**
+ * Closes internal objects
+ */
+ public void close() {
+ if (indexManager != null)
+ indexManager.flush();
+ if (schema != null)
+ schema.close();
+ if (security != null)
+ security.close();
+ }
+
+ protected ODatabaseRecord getDatabase() {
+ return ODatabaseRecordThreadLocal.INSTANCE.get();
+ }
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/schema/OPropertyImpl.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/schema/OPropertyImpl.java
index cfb4327c3bf..2b6af561419 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/schema/OPropertyImpl.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/schema/OPropertyImpl.java
@@ -16,7 +16,15 @@
package com.orientechnologies.orient.core.metadata.schema;
import java.text.ParseException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
import com.orientechnologies.common.util.OCaseIncentiveComparator;
import com.orientechnologies.common.util.OCollections;
@@ -34,7 +42,7 @@
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.core.sql.OCommandSQL;
-import com.orientechnologies.orient.core.storage.OStorageEmbedded;
+import com.orientechnologies.orient.core.storage.OStorageProxy;
import com.orientechnologies.orient.core.type.ODocumentWrapperNoClass;
/**
@@ -364,15 +372,14 @@ public String getCustom(final String iName) {
return customFields.get(iName);
}
- public void setCustomInternal(final String iName, final String iValue) {
- if (customFields == null)
- customFields = new HashMap<String, String>();
-
- customFields.put(iName, iValue);
- }
+ public void setCustomInternal(final String iName, final String iValue) {
+ if (customFields == null)
+ customFields = new HashMap<String, String>();
+ customFields.put(iName, iValue);
+ }
- public OPropertyImpl setCustom(final String iName, final String iValue) {
+ public OPropertyImpl setCustom(final String iName, final String iValue) {
getDatabase().checkSecurity(ODatabaseSecurityResources.SCHEMA, ORole.PERMISSION_UPDATE);
final String cmd = String.format("alter property %s custom %s=%s", getFullName(), iName, iValue);
getDatabase().command(new OCommandSQL(cmd)).execute();
@@ -380,13 +387,13 @@ public OPropertyImpl setCustom(final String iName, final String iValue) {
return this;
}
- public Map<String, String> getCustomInternal() {
- if (customFields != null)
- return Collections.unmodifiableMap(customFields);
- return null;
- }
+ public Map<String, String> getCustomInternal() {
+ if (customFields != null)
+ return Collections.unmodifiableMap(customFields);
+ return null;
+ }
- /**
+ /**
* Change the type. It checks for compatibility between the change of type.
*
* @param iType
@@ -628,7 +635,7 @@ public ODocument toStream() {
}
public void saveInternal() {
- if (getDatabase().getStorage() instanceof OStorageEmbedded)
+ if (!(getDatabase().getStorage() instanceof OStorageProxy))
((OSchemaProxy) getDatabase().getMetadata().getSchema()).saveInternal();
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java
index ca4b55d0c09..c6f5cac5d2e 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java
@@ -31,7 +31,7 @@
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import com.orientechnologies.orient.core.storage.OStorage;
-import com.orientechnologies.orient.core.storage.OStorageEmbedded;
+import com.orientechnologies.orient.core.storage.OStorageProxy;
/**
* Shared security class. It's shared by all the database instances that point to the same storage.
@@ -53,7 +53,7 @@ public OUser authenticate(final String iUserName, final String iUserPassword) {
if (user.getAccountStatus() != STATUSES.ACTIVE)
throw new OSecurityAccessException(dbName, "User '" + iUserName + "' is not active");
- if (getDatabase().getStorage() instanceof OStorageEmbedded) {
+ if (!(getDatabase().getStorage() instanceof OStorageProxy)) {
// CHECK USER & PASSWORD
if (!user.checkPassword(iUserPassword)) {
// WAIT A BIT TO AVOID BRUTE FORCE
diff --git a/core/src/main/java/com/orientechnologies/orient/core/query/nativ/ONativeAsynchQuery.java b/core/src/main/java/com/orientechnologies/orient/core/query/nativ/ONativeAsynchQuery.java
index db49f25a387..f0612926262 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/query/nativ/ONativeAsynchQuery.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/query/nativ/ONativeAsynchQuery.java
@@ -27,101 +27,101 @@
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.core.record.impl.ODocument;
-import com.orientechnologies.orient.core.storage.OStorageEmbedded;
+import com.orientechnologies.orient.core.storage.OStorageProxy;
@SuppressWarnings("serial")
public abstract class ONativeAsynchQuery<CTX extends OQueryContextNative> extends ONativeQuery<CTX> {
- protected int resultCount = 0;
- protected ORecordInternal<?> record;
-
- public ONativeAsynchQuery(final String iCluster, final CTX iQueryRecordImpl) {
- this(iCluster, iQueryRecordImpl, null);
- }
-
- public ONativeAsynchQuery(final String iCluster, final CTX iQueryRecordImpl, final OCommandResultListener iResultListener) {
- super(iCluster);
- resultListener = iResultListener;
- queryRecord = iQueryRecordImpl;
- record = new ODocument();
- }
-
- @Deprecated
- public ONativeAsynchQuery(final ODatabaseRecord iDatabase, final String iCluster, final CTX iQueryRecordImpl,
- final OCommandResultListener iResultListener) {
- this(iCluster, iQueryRecordImpl, iResultListener);
- }
-
- public boolean isAsynchronous() {
- return resultListener != this;
- }
-
- public boolean foreach(final ORecordInternal<?> iRecord) {
- final ODocument record = (ODocument) iRecord;
- queryRecord.setRecord(record);
-
- if (filter(queryRecord)) {
- resultCount++;
- resultListener.result(record.copy());
-
- if (limit > -1 && resultCount == limit)
- // BREAK THE EXECUTION
- return false;
- }
- return true;
- }
-
- public List<ODocument> run(final Object... iArgs) {
- final ODatabaseRecord database = ODatabaseRecordThreadLocal.INSTANCE.get();
-
- if (!(database.getStorage() instanceof OStorageEmbedded))
- throw new OCommandExecutionException("Native queries can run only in embedded-local version. Not in the remote one.");
-
- queryRecord.setSourceQuery(this);
-
- // CHECK IF A CLASS WAS CREATED
- final OClass cls = database.getMetadata().getSchema().getClass(className);
- if (cls == null)
- throw new OCommandExecutionException("Class '" + className + "' was not found");
-
- final ORecordIteratorClass<ORecordInternal<?>> target = new ORecordIteratorClass<ORecordInternal<?>>(database,
- (ODatabaseRecordAbstract) database, className, isPolymorphic());
-
- // BROWSE ALL THE RECORDS
- for (OIdentifiable id : target) {
- final ORecordInternal<?> record = (ORecordInternal<?>) id.getRecord();
-
- if (record != null && record.getRecordType() != ODocument.RECORD_TYPE)
- // WRONG RECORD TYPE: JUMP IT
- continue;
-
- queryRecord.setRecord((ODocument) record);
-
- if (filter(queryRecord)) {
- resultCount++;
- resultListener.result(record.copy());
-
- if (limit > -1 && resultCount == limit)
- // BREAK THE EXECUTION
- break;
- }
- }
-
- return null;
- }
-
- public ODocument runFirst(final Object... iArgs) {
- setLimit(1);
- execute();
- return null;
- }
-
- @Override
- public OCommandResultListener getResultListener() {
- return resultListener;
- }
-
- @Override
- public void setResultListener(final OCommandResultListener resultListener) {
- this.resultListener = resultListener;
- }
+ protected int resultCount = 0;
+ protected ORecordInternal<?> record;
+
+ public ONativeAsynchQuery(final String iCluster, final CTX iQueryRecordImpl) {
+ this(iCluster, iQueryRecordImpl, null);
+ }
+
+ public ONativeAsynchQuery(final String iCluster, final CTX iQueryRecordImpl, final OCommandResultListener iResultListener) {
+ super(iCluster);
+ resultListener = iResultListener;
+ queryRecord = iQueryRecordImpl;
+ record = new ODocument();
+ }
+
+ @Deprecated
+ public ONativeAsynchQuery(final ODatabaseRecord iDatabase, final String iCluster, final CTX iQueryRecordImpl,
+ final OCommandResultListener iResultListener) {
+ this(iCluster, iQueryRecordImpl, iResultListener);
+ }
+
+ public boolean isAsynchronous() {
+ return resultListener != this;
+ }
+
+ public boolean foreach(final ORecordInternal<?> iRecord) {
+ final ODocument record = (ODocument) iRecord;
+ queryRecord.setRecord(record);
+
+ if (filter(queryRecord)) {
+ resultCount++;
+ resultListener.result(record.copy());
+
+ if (limit > -1 && resultCount == limit)
+ // BREAK THE EXECUTION
+ return false;
+ }
+ return true;
+ }
+
+ public List<ODocument> run(final Object... iArgs) {
+ final ODatabaseRecord database = ODatabaseRecordThreadLocal.INSTANCE.get();
+
+ if (database.getStorage() instanceof OStorageProxy)
+ throw new OCommandExecutionException("Native queries can run only in embedded-local version. Not in the remote one.");
+
+ queryRecord.setSourceQuery(this);
+
+ // CHECK IF A CLASS WAS CREATED
+ final OClass cls = database.getMetadata().getSchema().getClass(className);
+ if (cls == null)
+ throw new OCommandExecutionException("Class '" + className + "' was not found");
+
+ final ORecordIteratorClass<ORecordInternal<?>> target = new ORecordIteratorClass<ORecordInternal<?>>(database,
+ (ODatabaseRecordAbstract) database, className, isPolymorphic());
+
+ // BROWSE ALL THE RECORDS
+ for (OIdentifiable id : target) {
+ final ORecordInternal<?> record = (ORecordInternal<?>) id.getRecord();
+
+ if (record != null && record.getRecordType() != ODocument.RECORD_TYPE)
+ // WRONG RECORD TYPE: JUMP IT
+ continue;
+
+ queryRecord.setRecord((ODocument) record);
+
+ if (filter(queryRecord)) {
+ resultCount++;
+ resultListener.result(record.copy());
+
+ if (limit > -1 && resultCount == limit)
+ // BREAK THE EXECUTION
+ break;
+ }
+ }
+
+ return null;
+ }
+
+ public ODocument runFirst(final Object... iArgs) {
+ setLimit(1);
+ execute();
+ return null;
+ }
+
+ @Override
+ public OCommandResultListener getResultListener() {
+ return resultListener;
+ }
+
+ @Override
+ public void setResultListener(final OCommandResultListener resultListener) {
+ this.resultListener = resultListener;
+ }
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionOptimistic.java b/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionOptimistic.java
index f3988a63c62..7452527363e 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionOptimistic.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/tx/OTransactionOptimistic.java
@@ -38,6 +38,7 @@
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.storage.ORecordCallback;
import com.orientechnologies.orient.core.storage.OStorageEmbedded;
+import com.orientechnologies.orient.core.storage.OStorageProxy;
public class OTransactionOptimistic extends OTransactionRealAbstract {
private boolean usingLog;
@@ -56,7 +57,7 @@ public void commit() {
checkTransaction();
status = TXSTATUS.COMMITTING;
- if (!(database.getStorage() instanceof OStorageEmbedded))
+ if (database.getStorage() instanceof OStorageProxy)
database.getStorage().commit(this);
else {
final List<String> involvedIndexes = getInvolvedIndexes();
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java
index 429906a4d6e..08bcd698245 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java
@@ -56,7 +56,7 @@
import com.orientechnologies.orient.core.serialization.serializer.record.string.ORecordSerializerStringAbstract;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerAnyStreamable;
import com.orientechnologies.orient.core.storage.OCluster;
-import com.orientechnologies.orient.core.storage.OStorageEmbedded;
+import com.orientechnologies.orient.core.storage.OStorageProxy;
import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryProtocol;
import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryServer;
import com.orientechnologies.orient.server.OClientConnection;
@@ -439,7 +439,7 @@ protected void openDatabase() throws IOException {
connection.database = (ODatabaseDocumentTx) OServerMain.server().openDatabase(dbType, dbURL, user, passwd);
connection.rawDatabase = ((ODatabaseRaw) ((ODatabaseComplex<?>) connection.database.getUnderlying()).getUnderlying());
- if (!(connection.database.getStorage() instanceof OStorageEmbedded) && !loadUserFromSchema(user, passwd)) {
+ if (connection.database.getStorage() instanceof OStorageProxy && !loadUserFromSchema(user, passwd)) {
sendError(clientTxId, new OSecurityAccessException(connection.database.getName(),
"User or password not valid for database: '" + connection.database.getName() + "'"));
} else {
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java
index 957ed874e95..b5b0800c9a3 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java
@@ -158,7 +158,7 @@ public void service() throws ONetworkProtocolException, IOException {
connection.data.totalCommandExecutionTime += connection.data.lastCommandExecutionTime;
}
- protected void handleError(Exception e) {
+ protected void handleError(Throwable e) {
if (OLogManager.instance().isDebugEnabled())
OLogManager.instance().debug(this, "Caught exception", e);
@@ -209,13 +209,10 @@ protected void handleError(Exception e) {
}
if (cause != null)
- e = (Exception) cause;
+ e = cause;
} while (cause != null);
}
- if (errorReason == null)
- errorReason = OHttpUtils.STATUS_INTERNALERROR_DESCRIPTION;
-
if (errorMessage == null) {
// FORMAT GENERIC MESSAGE BY READING THE EXCEPTION STACK
final StringBuilder buffer = new StringBuilder();
@@ -229,6 +226,11 @@ protected void handleError(Exception e) {
errorMessage = buffer.toString();
}
+ if (errorReason == null) {
+ errorReason = OHttpUtils.STATUS_INTERNALERROR_DESCRIPTION;
+ OLogManager.instance().error(this, "Internal server error", e);
+ }
+
try {
sendTextContent(errorCode, errorReason, responseHeaders, OHttpUtils.CONTENT_TEXT_PLAIN, errorMessage);
} catch (IOException e1) {
diff --git a/server/src/main/java/com/orientechnologies/orient/server/task/OCreateRecordDistributedTask.java b/server/src/main/java/com/orientechnologies/orient/server/task/OCreateRecordDistributedTask.java
index 0fd763ae34b..65b85931710 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/task/OCreateRecordDistributedTask.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/task/OCreateRecordDistributedTask.java
@@ -53,8 +53,6 @@ public OCreateRecordDistributedTask(final String nodeSource, final String iDbNam
super(nodeSource, iDbName, iMode, iRid, iVersion);
content = iContent;
recordType = iRecordType;
- OLogManager.instance().warn(this, "DISTRIBUTED -> route CREATE RECORD in %s mode to %s %s{%s} v.%d", iMode, nodeSource,
- iDbName, iRid, iVersion);
}
@Override
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/RemoteCreateDocumentSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/RemoteCreateDocumentSpeedTest.java
index 9b9d0c3725e..f660ecbfc7a 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/RemoteCreateDocumentSpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/RemoteCreateDocumentSpeedTest.java
@@ -34,7 +34,7 @@ public class RemoteCreateDocumentSpeedTest extends OrientMonoThreadTest {
private ODocument record;
private Date date = new Date();
private long beginRecords;
- private final static long DELAY = 0;
+ private final static long DELAY = 1000;
public static void main(String[] iArgs) throws InstantiationException, IllegalAccessException {
RemoteCreateDocumentSpeedTest test = new RemoteCreateDocumentSpeedTest();
diff --git a/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java b/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java
index 50ce7f77697..555f2bbec48 100644
--- a/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java
+++ b/tools/src/main/java/com/orientechnologies/orient/console/OConsoleDatabaseApp.java
@@ -83,7 +83,7 @@
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import com.orientechnologies.orient.core.storage.ORawBuffer;
import com.orientechnologies.orient.core.storage.OStorage;
-import com.orientechnologies.orient.core.storage.OStorageEmbedded;
+import com.orientechnologies.orient.core.storage.OStorageProxy;
import com.orientechnologies.orient.core.storage.impl.local.ODataHoleInfo;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocal;
import com.orientechnologies.orient.enterprise.command.OCommandExecutorScript;
@@ -1019,7 +1019,7 @@ public void clusters() {
clusterId = currentDatabase.getClusterIdByName(clusterName);
clusterType = currentDatabase.getClusterType(clusterName);
count = currentDatabase.countClusterElements(clusterName);
- if (currentDatabase.getStorage() instanceof OStorageEmbedded) {
+ if (!(currentDatabase.getStorage() instanceof OStorageProxy)) {
size = currentDatabase.getClusterRecordSizeByName(clusterName);
totalElements += count;
totalSize += size;
|
770ee735efa935a21b9992090eb063732e826ba5
|
ReactiveX-RxJava
|
Add unit tests for recursive scheduler usage--These tests came from @mairbek at https://github.com/Netflix/RxJava/pull/229-issuecomment-16115941-
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java b/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java
index ec247d0b95..a4760ff65e 100644
--- a/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java
+++ b/rxjava-core/src/test/java/rx/concurrency/TestSchedulers.java
@@ -16,20 +16,24 @@
package rx.concurrency;
import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicReference;
import org.junit.Test;
import rx.Observable;
import rx.Observer;
+import rx.Scheduler;
import rx.Subscription;
+import rx.subscriptions.BooleanSubscription;
import rx.subscriptions.Subscriptions;
import rx.util.functions.Action1;
import rx.util.functions.Func1;
+import rx.util.functions.Func2;
public class TestSchedulers {
@@ -245,4 +249,114 @@ public void call(Integer t) {
assertEquals(5, count.get());
}
+ @Test
+ public void testRecursiveScheduler1() {
+ Observable<Integer> obs = Observable.create(new Func1<Observer<Integer>, Subscription>() {
+ @Override
+ public Subscription call(final Observer<Integer> observer) {
+ return Schedulers.currentThread().schedule(0, new Func2<Scheduler, Integer, Subscription>() {
+ @Override
+ public Subscription call(Scheduler scheduler, Integer i) {
+ if (i > 42) {
+ observer.onCompleted();
+ return Subscriptions.empty();
+ }
+
+ observer.onNext(i);
+
+ return scheduler.schedule(i + 1, this);
+ }
+ });
+ }
+ });
+
+ final AtomicInteger lastValue = new AtomicInteger();
+ obs.forEach(new Action1<Integer>() {
+
+ @Override
+ public void call(Integer v) {
+ System.out.println("Value: " + v);
+ lastValue.set(v);
+ }
+ });
+
+ assertEquals(42, lastValue.get());
+ }
+
+ @Test
+ public void testRecursiveScheduler2() throws InterruptedException {
+ // use latches instead of Thread.sleep
+ final CountDownLatch latch = new CountDownLatch(10);
+ final CountDownLatch completionLatch = new CountDownLatch(1);
+
+ Observable<Integer> obs = Observable.create(new Func1<Observer<Integer>, Subscription>() {
+ @Override
+ public Subscription call(final Observer<Integer> observer) {
+
+ return Schedulers.threadPoolForComputation().schedule(new BooleanSubscription(), new Func2<Scheduler, BooleanSubscription, Subscription>() {
+ @Override
+ public Subscription call(Scheduler scheduler, BooleanSubscription cancel) {
+ if (cancel.isUnsubscribed()) {
+ observer.onCompleted();
+ completionLatch.countDown();
+ return Subscriptions.empty();
+ }
+
+ observer.onNext(42);
+ latch.countDown();
+
+ try {
+ Thread.sleep(1);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+
+ scheduler.schedule(cancel, this);
+
+ return cancel;
+ }
+ });
+ }
+ });
+
+ @SuppressWarnings("unchecked")
+ Observer<Integer> o = mock(Observer.class);
+
+ final AtomicInteger count = new AtomicInteger();
+ final AtomicBoolean completed = new AtomicBoolean(false);
+ Subscription subscribe = obs.subscribe(new Observer<Integer>() {
+ @Override
+ public void onCompleted() {
+ System.out.println("Completed");
+ completed.set(true);
+ }
+
+ @Override
+ public void onError(Exception e) {
+ System.out.println("Error");
+ }
+
+ @Override
+ public void onNext(Integer args) {
+ count.incrementAndGet();
+ System.out.println(args);
+ }
+ });
+
+ if (!latch.await(5000, TimeUnit.MILLISECONDS)) {
+ fail("Timed out waiting on onNext latch");
+ }
+
+ // now unsubscribe and ensure it stops the recursive loop
+ subscribe.unsubscribe();
+ System.out.println("unsubscribe");
+
+ if (!completionLatch.await(5000, TimeUnit.MILLISECONDS)) {
+ fail("Timed out waiting on completion latch");
+ }
+
+ assertEquals(10, count.get()); // wondering if this could be 11 in a race condition (which would be okay due to how unsubscribe works ... just it would make this test non-deterministic)
+ assertTrue(completed.get());
+ }
+
}
|
eb831a81621c685e710b9ff75b1ac7f5c340d1db
|
orientdb
|
Issue 161: added the support for custom strategy- on creation of records- http://code.google.com/p/orient/wiki/Security-Customize_on_creation--
|
a
|
https://github.com/orientechnologies/orientdb
|
diff --git a/build.number b/build.number
index 0928ce53e5d..0e16b998107 100644
--- a/build.number
+++ b/build.number
@@ -1,3 +1,3 @@
#Build Number for ANT. Do not edit!
-#Mon Oct 01 16:53:04 CEST 2012
-build.number=12603
+#Mon Oct 01 18:00:48 CEST 2012
+build.number=12604
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/ORestrictedAccessHook.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/ORestrictedAccessHook.java
index e2267e1490f..3a8a81eef1b 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/ORestrictedAccessHook.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/ORestrictedAccessHook.java
@@ -15,15 +15,16 @@
*/
package com.orientechnologies.orient.core.metadata.security;
-import java.util.HashSet;
import java.util.Set;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
+import com.orientechnologies.orient.core.exception.OConfigurationException;
import com.orientechnologies.orient.core.exception.OSecurityException;
import com.orientechnologies.orient.core.hook.ODocumentHookAbstract;
import com.orientechnologies.orient.core.metadata.schema.OClass;
+import com.orientechnologies.orient.core.metadata.schema.OClassImpl;
import com.orientechnologies.orient.core.record.impl.ODocument;
/**
@@ -40,14 +41,32 @@ public ORestrictedAccessHook() {
public RESULT onRecordBeforeCreate(final ODocument iDocument) {
final OClass cls = iDocument.getSchemaClass();
if (cls != null && cls.isSubClassOf(OSecurityShared.RESTRICTED_CLASSNAME)) {
- Set<OIdentifiable> allowed = iDocument.field(OSecurityShared.ALLOW_ALL_FIELD);
- if (allowed == null) {
- allowed = new HashSet<OIdentifiable>();
- iDocument.field(OSecurityShared.ALLOW_ALL_FIELD, allowed);
- }
- allowed.add(ODatabaseRecordThreadLocal.INSTANCE.get().getUser().getDocument().getIdentity());
+ String fieldNames = ((OClassImpl) cls).getCustom(OSecurityShared.ONCREATE_FIELD);
+ if (fieldNames == null)
+ fieldNames = OSecurityShared.ALLOW_ALL_FIELD;
+ final String[] fields = fieldNames.split(",");
+ String identityType = ((OClassImpl) cls).getCustom(OSecurityShared.ONCREATE_IDENTITY_TYPE);
+ if (identityType == null)
+ identityType = "user";
+
+ final ODatabaseRecord db = ODatabaseRecordThreadLocal.INSTANCE.get();
- return RESULT.RECORD_CHANGED;
+ ODocument identity = null;
+ if (identityType.equals("user"))
+ identity = db.getUser().getDocument();
+ else if (identityType.equals("role")) {
+ final Set<ORole> roles = db.getUser().getRoles();
+ if (!roles.isEmpty())
+ identity = roles.iterator().next().getDocument();
+ } else
+ throw new OConfigurationException("Wrong custom field '" + OSecurityShared.ONCREATE_IDENTITY_TYPE + "' in class '"
+ + cls.getName() + "' with value '" + identityType + "'. Supported ones are: 'user', 'role'");
+
+ if (identity != null) {
+ for (String f : fields)
+ db.getMetadata().getSecurity().allowIdentity(iDocument, f, identity);
+ return RESULT.RECORD_CHANGED;
+ }
}
return RESULT.RECORD_NOT_CHANGED;
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurity.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurity.java
index 8bb72ef6c34..b5af39a5bd8 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurity.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurity.java
@@ -38,10 +38,14 @@ public interface OSecurity {
public OIdentifiable allowRole(final ODocument iDocument, final String iAllowFieldName, final String iRoleName);
+ public OIdentifiable allowIdentity(final ODocument iDocument, final String iAllowFieldName, final OIdentifiable iId);
+
public OIdentifiable disallowUser(final ODocument iDocument, final String iAllowFieldName, final String iUserName);
public OIdentifiable disallowRole(final ODocument iDocument, final String iAllowFieldName, final String iRoleName);
+ public OIdentifiable disallowIdentity(final ODocument iDocument, final String iAllowFieldName, final OIdentifiable iId);
+
public OUser authenticate(String iUsername, String iUserPassword);
public OUser getUser(String iUserName);
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityNull.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityNull.java
index b24088b851e..3992a1b153e 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityNull.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityNull.java
@@ -99,6 +99,11 @@ public OIdentifiable allowRole(ODocument iDocument, String iAllowFieldName, Stri
return null;
}
+ @Override
+ public OIdentifiable allowIdentity(ODocument iDocument, String iAllowFieldName, OIdentifiable iId) {
+ return null;
+ }
+
@Override
public OIdentifiable disallowUser(ODocument iDocument, String iAllowFieldName, String iUserName) {
return null;
@@ -108,4 +113,9 @@ public OIdentifiable disallowUser(ODocument iDocument, String iAllowFieldName, S
public OIdentifiable disallowRole(ODocument iDocument, String iAllowFieldName, String iRoleName) {
return null;
}
+
+ @Override
+ public OIdentifiable disallowIdentity(ODocument iDocument, String iAllowFieldName, OIdentifiable iId) {
+ return null;
+ }
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityProxy.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityProxy.java
index eedbd07578d..3b5e6d6bc8c 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityProxy.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityProxy.java
@@ -48,6 +48,11 @@ public OIdentifiable allowRole(final ODocument iDocument, final String iAllowFie
return delegate.allowRole(iDocument, iAllowFieldName, iRoleName);
}
+ @Override
+ public OIdentifiable allowIdentity(ODocument iDocument, String iAllowFieldName, OIdentifiable iId) {
+ return delegate.allowIdentity(iDocument, iAllowFieldName, iId);
+ }
+
public OIdentifiable disallowUser(final ODocument iDocument, final String iAllowFieldName, final String iUserName) {
return delegate.disallowUser(iDocument, iAllowFieldName, iUserName);
}
@@ -56,6 +61,11 @@ public OIdentifiable disallowRole(final ODocument iDocument, final String iAllow
return delegate.disallowRole(iDocument, iAllowFieldName, iRoleName);
}
+ @Override
+ public OIdentifiable disallowIdentity(ODocument iDocument, String iAllowFieldName, OIdentifiable iId) {
+ return delegate.disallowIdentity(iDocument, iAllowFieldName, iId);
+ }
+
public OUser create() {
return delegate.create();
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java
index 621c062f024..80fdfe6e8d6 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OSecurityShared.java
@@ -43,11 +43,13 @@
*
*/
public class OSecurityShared extends OSharedResourceAdaptive implements OSecurity, OCloseable {
- public static final String RESTRICTED_CLASSNAME = "ORestricted";
- public static final String ALLOW_ALL_FIELD = "_allow";
- public static final String ALLOW_READ_FIELD = "_allowRead";
- public static final String ALLOW_UPDATE_FIELD = "_allowUpdate";
- public static final String ALLOW_DELETE_FIELD = "_allowDelete";
+ public static final String RESTRICTED_CLASSNAME = "ORestricted";
+ public static final String ALLOW_ALL_FIELD = "_allow";
+ public static final String ALLOW_READ_FIELD = "_allowRead";
+ public static final String ALLOW_UPDATE_FIELD = "_allowUpdate";
+ public static final String ALLOW_DELETE_FIELD = "_allowDelete";
+ public static final String ONCREATE_IDENTITY_TYPE = "onCreate.identityType";
+ public static final String ONCREATE_FIELD = "onCreate.fields";
public OIdentifiable allowUser(final ODocument iDocument, final String iAllowFieldName, final String iUserName) {
final OUser user = ODatabaseRecordThreadLocal.INSTANCE.get().getMetadata().getSecurity().getUser(iUserName);
|
40331c29a3350a994d18c2b04c95e1d5a80527c3
|
restlet-framework-java
|
JAX-RS extension continued: - @FormParam is now- supported--
|
a
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/params/ContextInjector.java b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/params/ContextInjector.java
index d1fd0162b3..edf0295928 100644
--- a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/params/ContextInjector.java
+++ b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/params/ContextInjector.java
@@ -262,6 +262,7 @@ public void injectInto(Object resource, boolean allMustBeAvailable)
* @throws ImplementationException
* the declaringClass must not be {@link UriInfo}
*/
+ @SuppressWarnings("unused") // TODO remove genericType, if not needed
static Object getInjectObject(Class<?> declaringClass, Type genericType,
ThreadLocalizedContext tlContext, Providers providers,
ExtensionBackwardMapping extensionBackwardMapping)
diff --git a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/params/ParameterList.java b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/params/ParameterList.java
index ff6a0f4429..3c1c68c081 100644
--- a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/params/ParameterList.java
+++ b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/params/ParameterList.java
@@ -38,6 +38,7 @@
import javax.ws.rs.CookieParam;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.Encoded;
+import javax.ws.rs.FormParam;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.MatrixParam;
import javax.ws.rs.PathParam;
@@ -387,8 +388,8 @@ static class CookieParamGetter extends NoEncParamGetter {
/**
* @param annoSaysLeaveClassEncoded
- * to check if the annotation is available, but should not
- * be.
+ * to check if the annotation is available, but should
+ * not be.
*/
CookieParamGetter(CookieParam cookieParam, DefaultValue defaultValue,
Class<?> convToCl, Type convToGen,
@@ -445,8 +446,8 @@ public Object getParamValue() {
/**
* Abstract super class for access to the entity or to @*Param where
- * encoded is allowed (@{@link PathParam}, @{@link MatrixParam} and
- * @{@link QueryParam}).
+ * encoded is allowed (@{@link PathParam}, @{@link MatrixParam}
+ * and @{@link QueryParam}).
*/
abstract static class EncParamGetter extends AbstractParamGetter {
@@ -471,7 +472,7 @@ static class HeaderParamGetter extends NoEncParamGetter {
/**
* @param annoSaysLeaveClassEncoded
- * to check if the annotation is available.
+ * to check if the annotation is available.
*/
HeaderParamGetter(HeaderParam headerParam, DefaultValue defaultValue,
Class<?> convToCl, Type paramGenericType,
@@ -533,15 +534,14 @@ public Object getParamValue() {
/**
* Abstract super class for access to the entity or to @*Param where
- * encoded is allowed (@{@link PathParam}, @{@link MatrixParam} and
- * @{@link QueryParam}).
+ * encoded is allowed (@{@link PathParam}, @{@link MatrixParam}
+ * and @{@link QueryParam}).
*/
abstract static class NoEncParamGetter extends AbstractParamGetter {
- // TODO support @FormParam
/**
* @param annoSaysLeaveEncoded
- * to check if the annotation is available.
+ * to check if the annotation is available.
*/
NoEncParamGetter(DefaultValue defaultValue, Class<?> convToCl,
Type convToGen, ThreadLocalizedContext tlContext,
@@ -626,7 +626,7 @@ public Object getParamValue() {
}
}
- static class QueryParamGetter extends EncParamGetter {
+ static class QueryParamGetter extends FormOrQueryParamGetter {
private final QueryParam queryParam;
@@ -643,14 +643,33 @@ public Object getParamValue() {
.getResourceRef();
final String queryString = resourceRef.getQuery();
final Form form = Converter.toFormEncoded(queryString, localLogger);
- // NICE cache Form
final String paramName = this.queryParam.value();
+ return super.getParamValue(form, paramName);
+ }
+ }
+
+ static abstract class FormOrQueryParamGetter extends EncParamGetter {
+
+ FormOrQueryParamGetter(DefaultValue defaultValue, Class<?> convToCl,
+ Type convToGen, ThreadLocalizedContext tlContext,
+ boolean leaveEncoded) {
+ super(defaultValue, convToCl, convToGen, tlContext, leaveEncoded);
+ }
+
+ /**
+ * @param form
+ * @param paramName
+ * @return
+ * @throws ConvertQueryParamException
+ */
+ Object getParamValue(final Form form, final String paramName)
+ throws ConvertQueryParamException {
final List<Parameter> parameters = form.subList(paramName);
try {
if (this.collType == null) { // no collection parameter
- final Parameter firstQueryParam = form.getFirst(paramName);
+ final Parameter firstFormParam = form.getFirst(paramName);
final String queryParamValue = WrapperUtil
- .getValue(firstQueryParam);
+ .getValue(firstFormParam);
return convertParamValue(queryParamValue);
}
ParamValueIter queryParamValueIter;
@@ -662,9 +681,28 @@ public Object getParamValue() {
}
}
+ static class FormParamGetter extends FormOrQueryParamGetter {
+
+ private final FormParam formParam;
+
+ FormParamGetter(FormParam formParam, DefaultValue defaultValue,
+ Class<?> convToCl, Type convToGen,
+ ThreadLocalizedContext tlContext, boolean leaveEncoded) {
+ super(defaultValue, convToCl, convToGen, tlContext, leaveEncoded);
+ this.formParam = formParam;
+ }
+
+ @Override
+ public Object getParamValue() {
+ final Form form = this.tlContext.get().getRequest()
+ .getEntityAsForm();
+ final String paramName = this.formParam.value();
+ return super.getParamValue(form, paramName);
+ }
+ }
+
/**
- * @author Stephan
- *
+ * @author Stephan Koops
*/
private static class UriInfoGetter implements ParamGetter {
@@ -773,6 +811,12 @@ static boolean getLeaveEncoded(Annotation[] annotations) {
/** @see #paramCount */
private final ParamGetter[] parameters;
+ /**
+ * must call the {@link EntityGetter} first, if @{@link FormParam} is
+ * used. A value less than zero means, that no special handling is needed.
+ */
+ private final int entityPosition;
+
/**
* @param parameterTypes
* @param genParamTypes
@@ -782,29 +826,31 @@ static boolean getLeaveEncoded(Annotation[] annotations) {
* @param jaxRsProviders
* @param extensionBackwardMapping
* @param paramsAllowed
- * true, if @*Params are allowed as parameter, otherwise
- * false.
+ * true, if @*Params are allowed as parameter, otherwise
+ * false.
* @param entityAllowed
- * true, if the entity is allowed as parameter, otherwise false.
+ * true, if the entity is allowed as parameter, otherwise
+ * false.
* @param logger
* @param allMustBeAvailable
- * if true, all values must be available (for singeltons creation
- * it must be false)
+ * if true, all values must be available (for singeltons
+ * creation it must be false)
* @throws MissingAnnotationException
* @throws IllegalTypeException
- * if the given class is not valid to be annotated with @
- * {@link Context}.
+ * if the given class is not valid to be annotated with
+ * @ {@link Context}.
*/
private ParameterList(Class<?>[] parameterTypes, Type[] genParamTypes,
Annotation[][] paramAnnoss, ThreadLocalizedContext tlContext,
boolean leaveAllEncoded, JaxRsProviders jaxRsProviders,
ExtensionBackwardMapping extensionBackwardMapping,
- boolean paramsAllowed,
- boolean entityAllowed, Logger logger, boolean allMustBeAvailable) throws MissingAnnotationException,
+ boolean paramsAllowed, boolean entityAllowed, Logger logger,
+ boolean allMustBeAvailable) throws MissingAnnotationException,
IllegalTypeException {
this.paramCount = parameterTypes.length;
this.parameters = new ParamGetter[this.paramCount];
boolean entityAlreadyRead = false;
+ int entityPosition = -1;
for (int i = 0; i < this.paramCount; i++) {
final Class<?> parameterType = parameterTypes[i];
final Type genParamType = genParamTypes[i];
@@ -835,6 +881,7 @@ private ParameterList(Class<?>[] parameterTypes, Type[] genParamTypes,
final PathParam pathParam = getAnno(paramAnnos, PathParam.class);
final QueryParam queryParam = getAnno(paramAnnos,
QueryParam.class);
+ final FormParam formParam = getAnno(paramAnnos, FormParam.class);
if (pathParam != null) {
this.parameters[i] = new PathParamGetter(pathParam,
defValue, parameterType, genParamType, tlContext,
@@ -860,6 +907,11 @@ private ParameterList(Class<?>[] parameterTypes, Type[] genParamTypes,
defValue, parameterType, genParamType, tlContext,
leaveAllEncoded || leaveThisEncoded);
continue;
+ } else if (formParam != null) {
+ this.parameters[i] = new FormParamGetter(formParam,
+ defValue, parameterType, genParamType, tlContext,
+ leaveAllEncoded || leaveThisEncoded);
+ continue;
}
}
// could only be the entity here
@@ -880,11 +932,13 @@ private ParameterList(Class<?>[] parameterTypes, Type[] genParamTypes,
genParamType, logger);
}
if (this.parameters[i] == null) {
- this.parameters[i] = new EntityGetter(parameterType, genParamType,
- tlContext, jaxRsProviders, paramAnnos);
+ this.parameters[i] = new EntityGetter(parameterType,
+ genParamType, tlContext, jaxRsProviders, paramAnnos);
}
+ entityPosition = i;
entityAlreadyRead = true;
}
+ this.entityPosition = entityPosition;
}
/**
@@ -898,20 +952,19 @@ private ParameterList(Class<?>[] parameterTypes, Type[] genParamTypes,
* @param allMustBeAvailable
* @throws MissingAnnotationException
* @throws IllegalTypeException
- * if one of the parameters contains a @{@link Context} on
- * an type that must not be annotated with @{@link Context}.
+ * if one of the parameters contains a @{@link Context}
+ * on an type that must not be annotated with @{@link Context}.
*/
public ParameterList(Constructor<?> constr,
ThreadLocalizedContext tlContext, boolean leaveEncoded,
JaxRsProviders jaxRsProviders,
ExtensionBackwardMapping extensionBackwardMapping,
- boolean paramsAllowed,
- Logger logger, boolean allMustBeAvailable)
+ boolean paramsAllowed, Logger logger, boolean allMustBeAvailable)
throws MissingAnnotationException, IllegalTypeException {
this(constr.getParameterTypes(), constr.getGenericParameterTypes(),
constr.getParameterAnnotations(), tlContext, leaveEncoded,
- jaxRsProviders, extensionBackwardMapping, paramsAllowed,
- false, logger, allMustBeAvailable);
+ jaxRsProviders, extensionBackwardMapping, paramsAllowed, false,
+ logger, allMustBeAvailable);
}
/**
@@ -925,21 +978,20 @@ public ParameterList(Constructor<?> constr,
* @param logger
* @throws MissingAnnotationException
* @throws IllegalTypeException
- * if one of the parameters contains a @{@link Context} on
- * an type that must not be annotated with @{@link Context}.
+ * if one of the parameters contains a @{@link Context}
+ * on an type that must not be annotated with @{@link Context}.
*/
public ParameterList(Method executeMethod, Method annotatedMethod,
ThreadLocalizedContext tlContext, boolean leaveEncoded,
JaxRsProviders jaxRsProviders,
ExtensionBackwardMapping extensionBackwardMapping,
- boolean entityAllowed,
- Logger logger)
+ boolean entityAllowed, Logger logger)
throws MissingAnnotationException, IllegalTypeException {
this(executeMethod.getParameterTypes(), executeMethod
.getGenericParameterTypes(), annotatedMethod
.getParameterAnnotations(), tlContext, leaveEncoded,
- jaxRsProviders, extensionBackwardMapping, true,
- entityAllowed, logger, true);
+ jaxRsProviders, extensionBackwardMapping, true, entityAllowed,
+ logger, true);
}
/**
@@ -953,8 +1005,13 @@ public ParameterList(Method executeMethod, Method annotatedMethod,
public Object[] get() throws ConvertRepresentationException,
InvocationTargetException, WebApplicationException {
final Object[] args = new Object[this.parameters.length];
+ if (this.entityPosition >= 0) {
+ args[entityPosition] = this.parameters[entityPosition].getValue();
+ }
for (int i = 0; i < this.paramCount; i++) {
- args[i] = this.parameters[i].getValue();
+ if (i != this.entityPosition) {
+ args[i] = this.parameters[i].getValue();
+ }
}
return args;
}
diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/FormTestResource.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/FormTestResource.java
index 55ec848ee3..1c3f7c7924 100644
--- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/FormTestResource.java
+++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/FormTestResource.java
@@ -17,9 +17,18 @@
*/
package org.restlet.test.jaxrs.services.resources;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.TreeSet;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.FormParam;
+import javax.ws.rs.POST;
import javax.ws.rs.Path;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Request;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.StreamingOutput;
import javax.ws.rs.core.UriInfo;
/**
@@ -28,11 +37,86 @@
* @see UriInfo#getAncestorResources()
* @see UriInfo#getAncestorResourceURIs()
*/
-@Path("ancestorTest")
+@Path("formTest")
public class FormTestResource {
- public Object getByRequest(@Context Request request) {
- return "";
- // TODO FormTestResource
+ @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
+ @Produces(MediaType.TEXT_PLAIN)
+ @Path("formOnly")
+ @POST
+ public Object formOnly(final MultivaluedMap<String, String> form) {
+ return new StreamingOutput() {
+ public void write(OutputStream out) throws IOException {
+ for (final String key : new TreeSet<String>(form.keySet())) {
+ for (final String value : form.get(key)) {
+ out.write(key.getBytes());
+ out.write(" -> ".getBytes());
+ out.write(value.getBytes());
+ out.write('\n');
+ }
+ }
+ }
+ };
+ }
+
+ @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
+ @Produces(MediaType.TEXT_PLAIN)
+ @Path("paramOnly")
+ @POST
+ public Object paramOnly(@FormParam("a")
+ String a, @FormParam("b")
+ String b) {
+ return "a -> " + a + "\nb -> " + b + "\n";
+ }
+
+ @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
+ @Produces(MediaType.TEXT_PLAIN)
+ @Path("formAndParam")
+ @POST
+ public Object formAndParam(final MultivaluedMap<String, String> form,
+ @FormParam("a")
+ final String a) {
+ return new StreamingOutput() {
+ public void write(OutputStream out) throws IOException {
+ out.write("a -> ".getBytes());
+ out.write(a.getBytes());
+ out.write('\n');
+ for (final String key : new TreeSet<String>(form.keySet())) {
+ if (!key.equals("a")) {
+ for (final String value : form.get(key)) {
+ out.write(key.getBytes());
+ out.write(" -> ".getBytes());
+ out.write(value.getBytes());
+ out.write('\n');
+ }
+ }
+ }
+ }
+ };
+ }
+
+ @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
+ @Produces(MediaType.TEXT_PLAIN)
+ @Path("paramAndForm")
+ @POST
+ public Object paramAndForm(@FormParam("a")
+ final String a, final MultivaluedMap<String, String> form) {
+ return new StreamingOutput() {
+ public void write(OutputStream out) throws IOException {
+ out.write("a -> ".getBytes());
+ out.write(a.getBytes());
+ out.write('\n');
+ for (final String key : new TreeSet<String>(form.keySet())) {
+ if (!key.equals("a")) {
+ for (final String value : form.get(key)) {
+ out.write(key.getBytes());
+ out.write(" -> ".getBytes());
+ out.write(value.getBytes());
+ out.write('\n');
+ }
+ }
+ }
+ }
+ };
}
}
\ No newline at end of file
diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/AllServiceTests.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/AllServiceTests.java
index b8ab2089c7..df2b80d00d 100644
--- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/AllServiceTests.java
+++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/AllServiceTests.java
@@ -38,6 +38,7 @@ public static Test suite() {
mySuite.addTestSuite(CookieParamTest.class);
mySuite.addTestSuite(DeterminingMediaTypeTest.class);
mySuite.addTestSuite(ExcMapperTest.class);
+ mySuite.addTestSuite(FormTest.class);
mySuite.addTestSuite(GenericTypeTestCase.class);
mySuite.addTestSuite(HeadOptionsTest.class);
mySuite.addTestSuite(HttpHeaderTest.class);
diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/FormTest.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/FormTest.java
index 35617c8312..53bee06a1c 100644
--- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/FormTest.java
+++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/FormTest.java
@@ -17,14 +17,16 @@
*/
package org.restlet.test.jaxrs.services.tests;
+import java.io.IOException;
+
+import org.restlet.data.Form;
import org.restlet.data.Response;
import org.restlet.data.Status;
-import org.restlet.test.jaxrs.services.resources.AncestorTestService;
import org.restlet.test.jaxrs.services.resources.FormTestResource;
/**
* @author Stephan Koops
- * @see AncestorTestService
+ * @see FormTestResource
*/
public class FormTest extends JaxRsTestCase {
@@ -33,90 +35,45 @@ protected Class<?> getRootResourceClass() {
return FormTestResource.class;
}
- public void testGet() throws Exception {
- Response response = get();
- sysOutEntityIfError(response);
- assertEquals(Status.SUCCESS_OK, response.getStatus());
- assertEquals("0\n0", response.getEntity().getText());
+ public void testFormOnly() throws IOException {
+ check("formOnly");
}
- public void testUri() throws Exception {
- Response response = get("uris");
- sysOutEntityIfError(response);
- assertEquals(Status.SUCCESS_OK, response.getStatus());
- assertEquals("1\n/ancestorTest", response.getEntity().getText());
+ public void testFormAndParam() throws IOException {
+ check("formAndParam");
}
- public void testResourceClassNames() throws Exception {
- Response response = get("resourceClassNames");
- sysOutEntityIfError(response);
- assertEquals(Status.SUCCESS_OK, response.getStatus());
- assertEquals(
- "1\norg.restlet.test.jaxrs.services.resources.AncestorTestService",
- response.getEntity().getText());
+ /** @see FormTestResource#paramOnly(String, String) */
+ public void testParamOnly() throws IOException {
+ check("paramOnly");
}
- /**
- * @see AncestorTestService#getUriInfoAttribute(javax.ws.rs.core.UriInfo,
- * String)
- */
- public void testUriInfos() throws Exception {
- Response response404 = get("uriInfo/abc");
- assertEquals(Status.CLIENT_ERROR_NOT_FOUND, response404.getStatus());
-
- Response response = get("uriInfo/ancestorResourceURIs");
- sysOutEntityIfError(response);
- assertEquals(Status.SUCCESS_OK, response.getStatus());
- String entity = response.getEntity().getText();
- String expected = "[]\n[/ancestorTest]";
- System.out.println("expected:\n"+expected+"\ngot:\n"+entity);
- assertEquals(expected, entity);
+ public void testParamAndForm() throws IOException {
+ check("paramAndForm");
}
/**
- * @see AncestorTestService#getUriInfoAttribute(javax.ws.rs.core.UriInfo,
- * String)
+ * @param subPath
+ * @throws IOException
*/
- public void testUriInfosSub() throws Exception {
- Response response404 = get("sub/uriInfo/abc");
- assertEquals(Status.CLIENT_ERROR_NOT_FOUND, response404.getStatus());
-
- Response response = get("sub/uriInfo/ancestorResourceURIs");
- sysOutEntityIfError(response);
- assertEquals(Status.SUCCESS_OK, response.getStatus());
- String entity = response.getEntity().getText();
- String expected = "[]\n[/ancestorTest/sub, /ancestorTest]";
- System.out.println("expected:\n"+expected+"\ngot:\n"+entity);
- assertEquals(expected, entity);
- }
-
- public void testGetSub() throws Exception {
- Response response = get("sub");
+ private void check(String subPath) throws IOException {
+ Form form = new Form();
+ form.add("a", "b");
+ Response response = post(subPath, form.getWebRepresentation());
sysOutEntityIfError(response);
assertEquals(Status.SUCCESS_OK, response.getStatus());
- assertEquals("1\n1", response.getEntity().getText());
- }
+ assertEquals("a -> b\n", response.getEntity().getText());
- public void testGetSubSub() throws Exception {
- Response response = get("sub/sub");
+ form.add("c", "d");
+ response = post(subPath, form.getWebRepresentation());
sysOutEntityIfError(response);
assertEquals(Status.SUCCESS_OK, response.getStatus());
- assertEquals("2\n2", response.getEntity().getText());
- }
-
- public void testGetSubSameSub() throws Exception {
- Response response = get("sub/sameSub");
- sysOutEntityIfError(response);
- assertEquals(Status.SUCCESS_OK, response.getStatus());
- assertEquals("2\n2", response.getEntity().getText());
- }
+ assertEquals("a -> b\nc -> d\n", response.getEntity().getText());
- public void testSameSubSubUri() throws Exception {
- Response response = get("sameSub/sub/uris");
+ form.add("c", "d");
+ response = post(subPath, form.getWebRepresentation());
sysOutEntityIfError(response);
assertEquals(Status.SUCCESS_OK, response.getStatus());
- assertEquals(
- "3\n/ancestorTest/sameSub/sub\n/ancestorTest/sameSub\n/ancestorTest",
- response.getEntity().getText());
+ assertEquals("a -> b\nc -> d\nc -> d\n", response.getEntity().getText());
}
}
\ No newline at end of file
|
135b6c13e03dbb519f8163f5ee5b2c8d7c87684c
|
ReactiveX-RxJava
|
Some cleaning up--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-contrib/rxjava-android/src/main/java/rx/operators/OperatorWeakBinding.java b/rxjava-contrib/rxjava-android/src/main/java/rx/operators/OperatorWeakBinding.java
index 43a501b489..b1adb3d34d 100644
--- a/rxjava-contrib/rxjava-android/src/main/java/rx/operators/OperatorWeakBinding.java
+++ b/rxjava-contrib/rxjava-android/src/main/java/rx/operators/OperatorWeakBinding.java
@@ -52,7 +52,7 @@ private WeakSubscriber(Subscriber<? super T> source) {
@Override
public void onCompleted() {
- Subscriber<? super T> sub = subscriberRef.get();
+ final Subscriber<? super T> sub = subscriberRef.get();
if (shouldForwardNotification(sub)) {
sub.onCompleted();
} else {
@@ -62,7 +62,7 @@ public void onCompleted() {
@Override
public void onError(Throwable e) {
- Subscriber<? super T> sub = subscriberRef.get();
+ final Subscriber<? super T> sub = subscriberRef.get();
if (shouldForwardNotification(sub)) {
sub.onError(e);
} else {
@@ -72,7 +72,7 @@ public void onError(Throwable e) {
@Override
public void onNext(T t) {
- Subscriber<? super T> sub = subscriberRef.get();
+ final Subscriber<? super T> sub = subscriberRef.get();
if (shouldForwardNotification(sub)) {
sub.onNext(t);
} else {
@@ -90,7 +90,8 @@ private void handleLostBinding(Subscriber<? super T> sub, String context) {
Log.d(LOG_TAG, "subscriber gone; skipping " + context);
} else {
final R r = boundRef.get();
- if (r != null) { // the predicate failed to validate
+ if (r != null) {
+ // the predicate failed to validate
Log.d(LOG_TAG, "bound component has become invalid; skipping " + context);
} else {
Log.d(LOG_TAG, "bound component gone; skipping " + context);
|
1e49dcda27564e133e5528db215d8fb2d08130d0
|
kotlin
|
Extract Function: Make member/top-level function- private by default--
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/src/org/jetbrains/jet/plugin/refactoring/extractFunction/ui/KotlinExtractFunctionDialog.java b/idea/src/org/jetbrains/jet/plugin/refactoring/extractFunction/ui/KotlinExtractFunctionDialog.java
index b94ae589d0e70..98af2355c7234 100644
--- a/idea/src/org/jetbrains/jet/plugin/refactoring/extractFunction/ui/KotlinExtractFunctionDialog.java
+++ b/idea/src/org/jetbrains/jet/plugin/refactoring/extractFunction/ui/KotlinExtractFunctionDialog.java
@@ -125,8 +125,7 @@ public void documentChanged(DocumentEvent event) {
boolean enableVisibility = isVisibilitySectionAvailable();
visibilityBox.setEnabled(enableVisibility);
if (enableVisibility) {
- String visibility = originalDescriptor.getDescriptor().getVisibility();
- visibilityBox.setSelectedItem(visibility.isEmpty() ? "internal" : visibility);
+ visibilityBox.setSelectedItem("private");
}
visibilityBox.addItemListener(
new ItemListener() {
|
39fe690ec815b3e957a98c498d9f3b4c9b544981
|
kotlin
|
js enumerable for ecma5--
|
a
|
https://github.com/JetBrains/kotlin
|
diff --git a/js/js.libraries/src/core/annotations.kt b/js/js.libraries/src/core/annotations.kt
index adafe8c4aa94b..67dba0140cb72 100644
--- a/js/js.libraries/src/core/annotations.kt
+++ b/js/js.libraries/src/core/annotations.kt
@@ -3,4 +3,6 @@ package js;
native
annotation class native(name : String = "") {}
native
-annotation class library(name : String = "") {}
\ No newline at end of file
+annotation class library(name : String = "") {}
+native
+annotation class enumerable() {}
\ No newline at end of file
diff --git a/js/js.tests/test/org/jetbrains/k2js/test/semantics/PropertyAccessTest.java b/js/js.tests/test/org/jetbrains/k2js/test/semantics/PropertyAccessTest.java
index 1e01ecba839d3..3f602c9509942 100644
--- a/js/js.tests/test/org/jetbrains/k2js/test/semantics/PropertyAccessTest.java
+++ b/js/js.tests/test/org/jetbrains/k2js/test/semantics/PropertyAccessTest.java
@@ -16,10 +16,14 @@
package org.jetbrains.k2js.test.semantics;
+import com.google.common.collect.Lists;
+import org.jetbrains.annotations.NotNull;
import org.jetbrains.k2js.config.EcmaVersion;
import org.jetbrains.k2js.test.SingleFileTranslationTest;
+import org.jetbrains.k2js.test.utils.JsTestUtils;
import java.util.EnumSet;
+import java.util.List;
/**
* @author Pavel Talanov
@@ -79,4 +83,18 @@ public void testExtensionLiteralSafeCall() throws Exception {
public void testInitInstanceProperties() throws Exception {
fooBoxTest(EnumSet.of(EcmaVersion.v5));
}
+
+ public void testEnumerable() throws Exception {
+ fooBoxTest(JsTestUtils.successOnEcmaV5());
+ }
+
+ @Override
+ @NotNull
+ protected List<String> additionalJSFiles(@NotNull EcmaVersion ecmaVersion) {
+ List<String> result = Lists.newArrayList(super.additionalJSFiles(ecmaVersion));
+ if (getName().equals("testEnumerable")) {
+ result.add(pathToTestFiles() + "enumerate.js");
+ }
+ return result;
+ }
}
diff --git a/js/js.tests/test/org/jetbrains/k2js/test/utils/JsTestUtils.java b/js/js.tests/test/org/jetbrains/k2js/test/utils/JsTestUtils.java
index dd103c1a1b023..2f3165fc4e230 100644
--- a/js/js.tests/test/org/jetbrains/k2js/test/utils/JsTestUtils.java
+++ b/js/js.tests/test/org/jetbrains/k2js/test/utils/JsTestUtils.java
@@ -24,6 +24,7 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.EnumSet;
import java.util.List;
/**
@@ -34,6 +35,11 @@ public final class JsTestUtils {
private JsTestUtils() {
}
+ @NotNull
+ public static EnumSet<EcmaVersion> successOnEcmaV5() {
+ return EnumSet.of(EcmaVersion.v5);
+ }
+
@NotNull
public static String convertFileNameToDotJsFile(@NotNull String filename, @NotNull EcmaVersion ecmaVersion) {
String postFix = "_" + ecmaVersion.toString() + ".js";
diff --git a/js/js.translator/src/org/jetbrains/k2js/translate/context/Namer.java b/js/js.translator/src/org/jetbrains/k2js/translate/context/Namer.java
index 52da0995874a0..61499a1b72802 100644
--- a/js/js.translator/src/org/jetbrains/k2js/translate/context/Namer.java
+++ b/js/js.translator/src/org/jetbrains/k2js/translate/context/Namer.java
@@ -122,6 +122,9 @@ public static Namer newInstance(@NotNull JsScope rootScope) {
@NotNull
private final JsPropertyInitializer writablePropertyDescriptorField;
+ @NotNull
+ private final JsPropertyInitializer enumerablePropertyDescriptorField;
+
private Namer(@NotNull JsScope rootScope) {
kotlinName = rootScope.declareName(KOTLIN_OBJECT_NAME);
kotlinScope = new JsScope(rootScope, "Kotlin standard object");
@@ -132,7 +135,9 @@ private Namer(@NotNull JsScope rootScope) {
isTypeName = kotlinScope.declareName("isType");
- writablePropertyDescriptorField = new JsPropertyInitializer(new JsNameRef("writable"), rootScope.getProgram().getTrueLiteral());
+ JsProgram program = rootScope.getProgram();
+ writablePropertyDescriptorField = new JsPropertyInitializer(program.getStringLiteral("writable"), program.getTrueLiteral());
+ enumerablePropertyDescriptorField = new JsPropertyInitializer(program.getStringLiteral("enumerable"), program.getTrueLiteral());
}
@NotNull
@@ -190,6 +195,11 @@ public JsPropertyInitializer writablePropertyDescriptorField() {
return writablePropertyDescriptorField;
}
+ @NotNull
+ public JsPropertyInitializer enumerablePropertyDescriptorField() {
+ return enumerablePropertyDescriptorField;
+ }
+
@NotNull
/*package*/ JsScope getKotlinScope() {
return kotlinScope;
diff --git a/js/js.translator/src/org/jetbrains/k2js/translate/declaration/DeclarationBodyVisitor.java b/js/js.translator/src/org/jetbrains/k2js/translate/declaration/DeclarationBodyVisitor.java
index 683b7427ce166..73a973e089a3a 100644
--- a/js/js.translator/src/org/jetbrains/k2js/translate/declaration/DeclarationBodyVisitor.java
+++ b/js/js.translator/src/org/jetbrains/k2js/translate/declaration/DeclarationBodyVisitor.java
@@ -71,10 +71,9 @@ public List<JsPropertyInitializer> visitNamedFunction(@NotNull JetNamedFunction
@NotNull TranslationContext context) {
JsPropertyInitializer methodAsPropertyInitializer = Translation.functionTranslator(expression, context).translateAsMethod();
if (context.isEcma5()) {
- final FunctionDescriptor descriptor = getFunctionDescriptor(context.bindingContext(), expression);
- boolean overridable = descriptor.getModality().isOverridable();
+ FunctionDescriptor descriptor = getFunctionDescriptor(context.bindingContext(), expression);
JsExpression methodBodyExpression = methodAsPropertyInitializer.getValueExpr();
- methodAsPropertyInitializer.setValueExpr(JsAstUtils.createPropertyDataDescriptor(overridable, methodBodyExpression, context));
+ methodAsPropertyInitializer.setValueExpr(JsAstUtils.createPropertyDataDescriptor(descriptor, methodBodyExpression, context));
}
return Collections.singletonList(methodAsPropertyInitializer);
}
diff --git a/js/js.translator/src/org/jetbrains/k2js/translate/utils/AnnotationsUtils.java b/js/js.translator/src/org/jetbrains/k2js/translate/utils/AnnotationsUtils.java
index 303ca7742361c..a6256535d821a 100644
--- a/js/js.translator/src/org/jetbrains/k2js/translate/utils/AnnotationsUtils.java
+++ b/js/js.translator/src/org/jetbrains/k2js/translate/utils/AnnotationsUtils.java
@@ -19,6 +19,7 @@
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jet.lang.descriptors.ClassDescriptor;
+import org.jetbrains.jet.lang.descriptors.ClassKind;
import org.jetbrains.jet.lang.descriptors.DeclarationDescriptor;
import org.jetbrains.jet.lang.descriptors.annotations.AnnotationDescriptor;
import org.jetbrains.jet.lang.resolve.DescriptorUtils;
@@ -31,6 +32,8 @@
*/
public final class AnnotationsUtils {
+ private static final String ENUMERABLE = "js.enumerable";
+
private AnnotationsUtils() {
}
@@ -69,10 +72,14 @@ public static String getNameForAnnotatedObject(@NotNull DeclarationDescriptor de
@Nullable
private static AnnotationDescriptor getAnnotationByName(@NotNull DeclarationDescriptor descriptor,
- @NotNull PredefinedAnnotation annotation) {
+ @NotNull PredefinedAnnotation annotation) {
+ return getAnnotationByName(descriptor, annotation.getFQName());
+ }
+
+ @Nullable
+ private static AnnotationDescriptor getAnnotationByName(@NotNull DeclarationDescriptor descriptor, @NotNull String fqn) {
for (AnnotationDescriptor annotationDescriptor : descriptor.getAnnotations()) {
- String annotationClassFQName = getAnnotationClassFQName(annotationDescriptor);
- if (annotationClassFQName.equals(annotation.getFQName())) {
+ if (getAnnotationClassFQName(annotationDescriptor).equals(fqn)) {
return annotationDescriptor;
}
}
@@ -91,6 +98,16 @@ public static boolean isNativeObject(@NotNull DeclarationDescriptor descriptor)
return hasAnnotationOrInsideAnnotatedClass(descriptor, PredefinedAnnotation.NATIVE);
}
+ public static boolean isEnumerable(@NotNull DeclarationDescriptor descriptor) {
+ if (getAnnotationByName(descriptor, ENUMERABLE) != null) {
+ return true;
+ }
+ ClassDescriptor containingClass = getContainingClass(descriptor);
+ return containingClass != null &&
+ (getAnnotationByName(containingClass, ENUMERABLE) != null ||
+ (containingClass.getKind().equals(ClassKind.OBJECT) && containingClass.getName().isSpecial()));
+ }
+
public static boolean isLibraryObject(@NotNull DeclarationDescriptor descriptor) {
return hasAnnotationOrInsideAnnotatedClass(descriptor, PredefinedAnnotation.LIBRARY);
}
@@ -105,14 +122,15 @@ public static boolean isPredefinedObject(@NotNull DeclarationDescriptor descript
}
public static boolean hasAnnotationOrInsideAnnotatedClass(@NotNull DeclarationDescriptor descriptor,
- @NotNull PredefinedAnnotation annotation) {
- if (getAnnotationByName(descriptor, annotation) != null) {
+ @NotNull PredefinedAnnotation annotation) {
+ return hasAnnotationOrInsideAnnotatedClass(descriptor, annotation.getFQName());
+ }
+
+ private static boolean hasAnnotationOrInsideAnnotatedClass(@NotNull DeclarationDescriptor descriptor, @NotNull String fqn) {
+ if (getAnnotationByName(descriptor, fqn) != null) {
return true;
}
ClassDescriptor containingClass = getContainingClass(descriptor);
- if (containingClass == null) {
- return false;
- }
- return (getAnnotationByName(containingClass, annotation) != null);
+ return containingClass != null && getAnnotationByName(containingClass, fqn) != null;
}
}
diff --git a/js/js.translator/src/org/jetbrains/k2js/translate/utils/JsAstUtils.java b/js/js.translator/src/org/jetbrains/k2js/translate/utils/JsAstUtils.java
index fbbc442187f34..199a00d85af3b 100644
--- a/js/js.translator/src/org/jetbrains/k2js/translate/utils/JsAstUtils.java
+++ b/js/js.translator/src/org/jetbrains/k2js/translate/utils/JsAstUtils.java
@@ -21,6 +21,8 @@
import com.google.dart.compiler.util.AstUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
+import org.jetbrains.jet.lang.descriptors.DeclarationDescriptor;
+import org.jetbrains.jet.lang.descriptors.FunctionDescriptor;
import org.jetbrains.jet.lang.descriptors.PropertyDescriptor;
import org.jetbrains.k2js.translate.context.TranslationContext;
@@ -289,11 +291,19 @@ public static JsInvocation definePropertyDataDescriptor(@NotNull PropertyDescrip
@NotNull TranslationContext context) {
return AstUtil.newInvocation(DEFINE_PROPERTY, new JsThisRef(),
context.program().getStringLiteral(context.getNameForDescriptor(descriptor).getIdent()),
- createPropertyDataDescriptor(descriptor.isVar(), value, context));
+ createPropertyDataDescriptor(descriptor.isVar(), descriptor, value, context));
}
@NotNull
- public static JsObjectLiteral createPropertyDataDescriptor(boolean writable,
+ public static JsObjectLiteral createPropertyDataDescriptor(@NotNull FunctionDescriptor descriptor,
+ @NotNull JsExpression value,
+ @NotNull TranslationContext context) {
+ return createPropertyDataDescriptor(descriptor.getModality().isOverridable(), descriptor, value, context);
+ }
+
+ @NotNull
+ private static JsObjectLiteral createPropertyDataDescriptor(boolean writable,
+ @NotNull DeclarationDescriptor descriptor,
@NotNull JsExpression value,
@NotNull TranslationContext context) {
JsObjectLiteral jsPropertyDescriptor = new JsObjectLiteral();
@@ -302,6 +312,9 @@ public static JsObjectLiteral createPropertyDataDescriptor(boolean writable,
if (writable) {
meta.add(context.namer().writablePropertyDescriptorField());
}
+ if (AnnotationsUtils.isEnumerable(descriptor)) {
+ meta.add(context.namer().enumerablePropertyDescriptorField());
+ }
return jsPropertyDescriptor;
}
diff --git a/js/js.translator/src/org/jetbrains/k2js/translate/utils/PredefinedAnnotation.java b/js/js.translator/src/org/jetbrains/k2js/translate/utils/PredefinedAnnotation.java
index e2e52dbea3498..661b799e86240 100644
--- a/js/js.translator/src/org/jetbrains/k2js/translate/utils/PredefinedAnnotation.java
+++ b/js/js.translator/src/org/jetbrains/k2js/translate/utils/PredefinedAnnotation.java
@@ -22,8 +22,6 @@
* @author Pavel Talanov
*/
public enum PredefinedAnnotation {
-
-
LIBRARY("js.library"),
NATIVE("js.native");
diff --git a/js/js.translator/testFiles/propertyAccess/cases/enumerable.kt b/js/js.translator/testFiles/propertyAccess/cases/enumerable.kt
new file mode 100644
index 0000000000000..899f049cd9e3b
--- /dev/null
+++ b/js/js.translator/testFiles/propertyAccess/cases/enumerable.kt
@@ -0,0 +1,28 @@
+package foo
+
+import js.enumerable
+import js.native
+
+native
+fun <T> _enumerate(o:T):T = noImpl
+
+native
+fun _findFirst<T>(o:Any):T = noImpl
+
+enumerable
+class Test() {
+ val a:Int = 100
+ val b:String = "s"
+}
+
+class P() {
+ enumerable
+ val a:Int = 100
+ val b:String = "s"
+}
+
+fun box():Boolean {
+ val test = _enumerate(Test())
+ val p = _enumerate(P())
+ return (100 == test.a && "s" == test.b) && p.a == 100 && _findFirst<Int>(object {val test = 100}) == 100;
+}
diff --git a/js/js.translator/testFiles/propertyAccess/enumerate.js b/js/js.translator/testFiles/propertyAccess/enumerate.js
new file mode 100644
index 0000000000000..d8c8d0491d739
--- /dev/null
+++ b/js/js.translator/testFiles/propertyAccess/enumerate.js
@@ -0,0 +1,13 @@
+function _enumerate(o) {
+ var r = {};
+ for (var p in o) {
+ r[p] = o[p];
+ }
+ return r;
+}
+
+function _findFirst(o) {
+ for (var p in o) {
+ return o[p];
+ }
+}
|
81e6ff51623792ebe9cb7c277fccd044bdd432b9
|
elasticsearch
|
Allow for plugins to register REST filter (better- support with async execution and some renaming), closes -1658.--
|
a
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/http/HttpServer.java b/src/main/java/org/elasticsearch/http/HttpServer.java
index 6ad5b876404de..0e5fb51f0ff12 100644
--- a/src/main/java/org/elasticsearch/http/HttpServer.java
+++ b/src/main/java/org/elasticsearch/http/HttpServer.java
@@ -51,6 +51,8 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
private final boolean disableSites;
+ private final PluginSiteFilter pluginSiteFilter = new PluginSiteFilter();
+
@Inject
public HttpServer(Settings settings, Environment environment, HttpServerTransport transport,
RestController restController,
@@ -111,33 +113,33 @@ public HttpStats stats() {
public void internalDispatchRequest(final HttpRequest request, final HttpChannel channel) {
if (request.rawPath().startsWith("/_plugin/")) {
- for (RestPreProcessor preProcessor : restController.preProcessors()) {
- if (!preProcessor.handleExternal()) {
- continue;
- }
- if (!preProcessor.process(request, channel)) {
- return;
- }
- }
- handlePluginSite(request, channel);
+ RestFilterChain filterChain = restController.filterChain(pluginSiteFilter);
+ filterChain.continueProcessing(request, channel);
return;
}
- if (!restController.dispatchRequest(request, channel)) {
- if (request.method() == RestRequest.Method.OPTIONS) {
- // when we have OPTIONS request, simply send OK by default (with the Access Control Origin header which gets automatically added)
- StringRestResponse response = new StringRestResponse(OK);
- channel.sendResponse(response);
- } else {
- channel.sendResponse(new StringRestResponse(BAD_REQUEST, "No handler found for uri [" + request.uri() + "] and method [" + request.method() + "]"));
- }
+ restController.dispatchRequest(request, channel);
+ }
+
+
+ class PluginSiteFilter extends RestFilter {
+
+ @Override
+ public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) {
+ handlePluginSite((HttpRequest) request, (HttpChannel) channel);
}
}
- private void handlePluginSite(HttpRequest request, HttpChannel channel) {
+ void handlePluginSite(HttpRequest request, HttpChannel channel) {
if (disableSites) {
channel.sendResponse(new StringRestResponse(FORBIDDEN));
return;
}
+ if (request.method() == RestRequest.Method.OPTIONS) {
+ // when we have OPTIONS request, simply send OK by default (with the Access Control Origin header which gets automatically added)
+ StringRestResponse response = new StringRestResponse(OK);
+ channel.sendResponse(response);
+ return;
+ }
if (request.method() != RestRequest.Method.GET) {
channel.sendResponse(new StringRestResponse(FORBIDDEN));
return;
diff --git a/src/main/java/org/elasticsearch/rest/RestController.java b/src/main/java/org/elasticsearch/rest/RestController.java
index 30926d0fdba2a..e852e76c2f539 100644
--- a/src/main/java/org/elasticsearch/rest/RestController.java
+++ b/src/main/java/org/elasticsearch/rest/RestController.java
@@ -21,6 +21,8 @@
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
+import org.elasticsearch.ElasticSearchIllegalStateException;
+import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.path.PathTrie;
@@ -31,6 +33,9 @@
import java.util.Arrays;
import java.util.Comparator;
+import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
+import static org.elasticsearch.rest.RestStatus.OK;
+
/**
*
*/
@@ -43,8 +48,10 @@ public class RestController extends AbstractLifecycleComponent<RestController> {
private final PathTrie<RestHandler> headHandlers = new PathTrie<RestHandler>(RestUtils.REST_DECODER);
private final PathTrie<RestHandler> optionsHandlers = new PathTrie<RestHandler>(RestUtils.REST_DECODER);
+ private final RestHandlerFilter handlerFilter = new RestHandlerFilter();
+
// non volatile since the assumption is that pre processors are registered on startup
- private RestPreProcessor[] preProcessors = new RestPreProcessor[0];
+ private RestFilter[] filters = new RestFilter[0];
@Inject
public RestController(Settings settings) {
@@ -61,22 +68,25 @@ protected void doStop() throws ElasticSearchException {
@Override
protected void doClose() throws ElasticSearchException {
+ for (RestFilter filter : filters) {
+ filter.close();
+ }
}
/**
* Registers a pre processor to be executed before the rest request is actually handled.
*/
- public synchronized void registerPreProcessor(RestPreProcessor preProcessor) {
- RestPreProcessor[] copy = new RestPreProcessor[preProcessors.length + 1];
- System.arraycopy(preProcessors, 0, copy, 0, preProcessors.length);
- copy[preProcessors.length] = preProcessor;
- Arrays.sort(copy, new Comparator<RestPreProcessor>() {
+ public synchronized void registerFilter(RestFilter preProcessor) {
+ RestFilter[] copy = new RestFilter[filters.length + 1];
+ System.arraycopy(filters, 0, copy, 0, filters.length);
+ copy[filters.length] = preProcessor;
+ Arrays.sort(copy, new Comparator<RestFilter>() {
@Override
- public int compare(RestPreProcessor o1, RestPreProcessor o2) {
+ public int compare(RestFilter o1, RestFilter o2) {
return o2.order() - o1.order();
}
});
- preProcessors = copy;
+ filters = copy;
}
/**
@@ -107,30 +117,55 @@ public void registerHandler(RestRequest.Method method, String path, RestHandler
}
}
- public RestPreProcessor[] preProcessors() {
- return preProcessors;
+ /**
+ * Returns a filter chain (if needed) to execute. If this method returns null, simply execute
+ * as usual.
+ */
+ @Nullable
+ public RestFilterChain filterChainOrNull(RestFilter executionFilter) {
+ if (filters.length == 0) {
+ return null;
+ }
+ return new ControllerFilterChain(executionFilter);
+ }
+
+ /**
+ * Returns a filter chain with the final filter being the provided filter.
+ */
+ public RestFilterChain filterChain(RestFilter executionFilter) {
+ return new ControllerFilterChain(executionFilter);
}
- public boolean dispatchRequest(final RestRequest request, final RestChannel channel) {
- try {
- for (RestPreProcessor preProcessor : preProcessors) {
- if (!preProcessor.process(request, channel)) {
- return true;
+ public void dispatchRequest(final RestRequest request, final RestChannel channel) {
+ if (filters.length == 0) {
+ try {
+ executeHandler(request, channel);
+ } catch (Exception e) {
+ try {
+ channel.sendResponse(new XContentThrowableRestResponse(request, e));
+ } catch (IOException e1) {
+ logger.error("Failed to send failure response for uri [" + request.uri() + "]", e1);
}
}
- final RestHandler handler = getHandler(request);
- if (handler == null) {
- return false;
- }
+ } else {
+ ControllerFilterChain filterChain = new ControllerFilterChain(handlerFilter);
+ filterChain.continueProcessing(request, channel);
+ }
+ }
+
+ void executeHandler(RestRequest request, RestChannel channel) {
+ final RestHandler handler = getHandler(request);
+ if (handler != null) {
handler.handleRequest(request, channel);
- } catch (Exception e) {
- try {
- channel.sendResponse(new XContentThrowableRestResponse(request, e));
- } catch (IOException e1) {
- logger.error("Failed to send failure response for uri [" + request.uri() + "]", e1);
+ } else {
+ if (request.method() == RestRequest.Method.OPTIONS) {
+ // when we have OPTIONS request, simply send OK by default (with the Access Control Origin header which gets automatically added)
+ StringRestResponse response = new StringRestResponse(OK);
+ channel.sendResponse(response);
+ } else {
+ channel.sendResponse(new StringRestResponse(BAD_REQUEST, "No handler found for uri [" + request.uri() + "] and method [" + request.method() + "]"));
}
}
- return true;
}
private RestHandler getHandler(RestRequest request) {
@@ -159,4 +194,45 @@ private String getPath(RestRequest request) {
// my_index/my_type/http%3A%2F%2Fwww.google.com
return request.rawPath();
}
+
+ class ControllerFilterChain implements RestFilterChain {
+
+ private final RestFilter executionFilter;
+
+ private volatile int index;
+
+ ControllerFilterChain(RestFilter executionFilter) {
+ this.executionFilter = executionFilter;
+ }
+
+ @Override
+ public void continueProcessing(RestRequest request, RestChannel channel) {
+ try {
+ int loc = index;
+ if (loc > filters.length) {
+ throw new ElasticSearchIllegalStateException("filter continueProcessing was called more than expected");
+ } else if (loc == filters.length) {
+ executionFilter.process(request, channel, this);
+ } else {
+ RestFilter preProcessor = filters[loc];
+ preProcessor.process(request, channel, this);
+ }
+ index++;
+ } catch (Exception e) {
+ try {
+ channel.sendResponse(new XContentThrowableRestResponse(request, e));
+ } catch (IOException e1) {
+ logger.error("Failed to send failure response for uri [" + request.uri() + "]", e1);
+ }
+ }
+ }
+ }
+
+ class RestHandlerFilter extends RestFilter {
+
+ @Override
+ public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) {
+ executeHandler(request, channel);
+ }
+ }
}
diff --git a/src/main/java/org/elasticsearch/rest/RestPreProcessor.java b/src/main/java/org/elasticsearch/rest/RestFilter.java
similarity index 53%
rename from src/main/java/org/elasticsearch/rest/RestPreProcessor.java
rename to src/main/java/org/elasticsearch/rest/RestFilter.java
index 273fbd5ca19fd..cf71e9f9734c8 100644
--- a/src/main/java/org/elasticsearch/rest/RestPreProcessor.java
+++ b/src/main/java/org/elasticsearch/rest/RestFilter.java
@@ -19,30 +19,30 @@
package org.elasticsearch.rest;
+import org.elasticsearch.ElasticSearchException;
+import org.elasticsearch.common.component.CloseableComponent;
+
/**
- * Rest pre processor allowing to pre process REST requests.
- * <p/>
- * Experimental interface.
+ * A filter allowing to filter rest operations.
*/
-public interface RestPreProcessor {
+public abstract class RestFilter implements CloseableComponent {
/**
- * Optionally, the order the processor will work on. Execution is done from lowest value to highest.
- * It is a good practice to allow to configure this for the relevant processor.
+ * Optionally, the order of the filter. Execution is done from lowest value to highest.
+ * It is a good practice to allow to configure this for the relevant filter.
*/
- int order();
+ public int order() {
+ return 0;
+ }
- /**
- * Should this processor also process external (non REST) requests, like plugin site requests.
- */
- boolean handleExternal();
+ @Override
+ public void close() throws ElasticSearchException {
+ // a no op
+ }
/**
- * Process the request, returning <tt>false</tt> if no further processing should be done. Note,
- * make sure to send a response if returning <tt>false</tt>, otherwise, no response will be sent.
- * <p/>
- * It is recommended that the process method will not do blocking calls, or heavily cache data
- * if a blocking call is done.
+ * Process the rest request. Using the channel to send a response, or the filter chain to continue
+ * processing the request.
*/
- boolean process(RestRequest request, RestChannel channel);
+ public abstract void process(RestRequest request, RestChannel channel, RestFilterChain filterChain);
}
diff --git a/src/main/java/org/elasticsearch/rest/RestFilterChain.java b/src/main/java/org/elasticsearch/rest/RestFilterChain.java
new file mode 100644
index 0000000000000..564e43af6f4cf
--- /dev/null
+++ b/src/main/java/org/elasticsearch/rest/RestFilterChain.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest;
+
+/**
+ * A filter chain allowing to continue and process the rest request.
+ */
+public interface RestFilterChain {
+
+ /**
+ * Continue processing the request. Should only be called if a response has not been sent
+ * through the channel.
+ */
+ void continueProcessing(RestRequest request, RestChannel channel);
+}
|
99bd4fbc657da545ecb40d8778f0979f233e0de9
|
orientdb
|
Created 2 new test case with creation of vertexes- and edges in not default clusters--
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLCreateVertexAndEdgeTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLCreateVertexAndEdgeTest.java
index 70faf948a0b..eda98967675 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLCreateVertexAndEdgeTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLCreateVertexAndEdgeTest.java
@@ -102,7 +102,7 @@ public void createEdgeDefaultClass() {
new OCommandSQL("create edge e1 cluster default from " + v3.getIdentity() + " to " + v5.getIdentity() + " set weight = 17"))
.execute();
Assert.assertEquals(e5.getClassName(), "E1");
- Assert.assertEquals(e5.getIdentity().getClusterId(), database.getDefaultClusterId());
+ //Assert.assertEquals(e5.getIdentity().getClusterId(), database.getDefaultClusterId());
// database.command(new OCommandSQL("drop class E1")).execute();
// database.command(new OCommandSQL("drop class V1")).execute();
|
0b3db7cbf5eada6176af266cc6bebfcd1332f115
|
restlet-framework-java
|
- Additional WADL refactorings.--
|
p
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/FaultInfo.java b/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/FaultInfo.java
index 0752ec6751..0ad154478f 100644
--- a/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/FaultInfo.java
+++ b/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/FaultInfo.java
@@ -32,6 +32,7 @@
import java.util.Iterator;
import java.util.List;
+import org.restlet.data.MediaType;
import org.restlet.data.Reference;
import org.restlet.data.Status;
import org.restlet.util.XmlWriter;
@@ -47,39 +48,66 @@ public class FaultInfo extends RepresentationInfo {
/**
* Constructor.
+ *
+ * @param status
+ * The associated status code.
*/
- public FaultInfo() {
+ public FaultInfo(Status status) {
super();
+ getStatuses().add(status);
}
/**
* Constructor with a single documentation element.
*
+ * @param status
+ * The associated status code.
* @param documentation
* A single documentation element.
*/
- public FaultInfo(DocumentationInfo documentation) {
+ public FaultInfo(Status status, DocumentationInfo documentation) {
super(documentation);
+ getStatuses().add(status);
}
/**
* Constructor with a list of documentation elements.
*
+ * @param status
+ * The associated status code.
* @param documentations
* The list of documentation elements.
*/
- public FaultInfo(List<DocumentationInfo> documentations) {
+ public FaultInfo(Status status, List<DocumentationInfo> documentations) {
super(documentations);
+ getStatuses().add(status);
}
/**
* Constructor with a single documentation element.
*
+ * @param status
+ * The associated status code.
* @param documentation
* A single documentation element.
*/
- public FaultInfo(String documentation) {
- super(documentation);
+ public FaultInfo(Status status, String documentation) {
+ this(status, new DocumentationInfo(documentation));
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param status
+ * The associated status code.
+ * @param mediaType
+ * The fault representation's media type.
+ * @param documentation
+ * A single documentation element.
+ */
+ public FaultInfo(Status status, MediaType mediaType, String documentation) {
+ this(status, new DocumentationInfo(documentation));
+ setMediaType(mediaType);
}
/**
diff --git a/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/MethodInfo.java b/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/MethodInfo.java
index 5ac681ffc5..0aac5ee5ab 100644
--- a/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/MethodInfo.java
+++ b/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/MethodInfo.java
@@ -32,8 +32,10 @@
import java.util.List;
import java.util.Map;
+import org.restlet.data.MediaType;
import org.restlet.data.Method;
import org.restlet.data.Reference;
+import org.restlet.data.Status;
import org.restlet.resource.Variant;
import org.restlet.util.XmlWriter;
import org.xml.sax.SAXException;
@@ -98,6 +100,24 @@ public MethodInfo(String documentation) {
super(documentation);
}
+ /**
+ * Adds a new fault to the response.
+ *
+ * @param status
+ * The associated status code.
+ * @param mediaType
+ * The fault representation's media type.
+ * @param documentation
+ * A single documentation element.
+ * @return The created fault description.
+ */
+ public FaultInfo addFault(Status status, MediaType mediaType,
+ String documentation) {
+ FaultInfo result = new FaultInfo(status, mediaType, documentation);
+ getResponse().getFaults().add(result);
+ return result;
+ }
+
/**
* Adds a new request parameter.
*
diff --git a/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/WadlRepresentation.java b/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/WadlRepresentation.java
index 9eadc8c8c6..d6af850a75 100644
--- a/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/WadlRepresentation.java
+++ b/modules/org.restlet.ext.wadl_1.0/src/org/restlet/ext/wadl/WadlRepresentation.java
@@ -530,7 +530,8 @@ public void startElement(String uri, String localName, String qName,
}
pushState(State.DOCUMENTATION);
} else if (localName.equals("fault")) {
- this.currentFault = new FaultInfo();
+ this.currentFault = new FaultInfo(null);
+
if (attrs.getIndex("id") != -1) {
this.currentFault.setIdentifier(attrs.getValue("id"));
}
|
f26b7b2321ba561c947da92510e95e22f059351c
|
ReactiveX-RxJava
|
Conditionals: Fix all but 2 tests--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-contrib/rxjava-computation-expressions/src/test/java/rx/operators/OperationConditionalsTest.java b/rxjava-contrib/rxjava-computation-expressions/src/test/java/rx/operators/OperationConditionalsTest.java
index f6c4f09ad0..44bb0c02c8 100644
--- a/rxjava-contrib/rxjava-computation-expressions/src/test/java/rx/operators/OperationConditionalsTest.java
+++ b/rxjava-contrib/rxjava-computation-expressions/src/test/java/rx/operators/OperationConditionalsTest.java
@@ -15,6 +15,7 @@
*/
package rx.operators;
+import static org.junit.Assert.*;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.*;
@@ -33,8 +34,10 @@
import rx.Observer;
import rx.Statement;
import rx.Subscription;
+import rx.observers.TestObserver;
import rx.schedulers.Schedulers;
import rx.schedulers.TestScheduler;
+import rx.util.functions.Action1;
import rx.util.functions.Func0;
public class OperationConditionalsTest {
@@ -108,7 +111,7 @@ public T call() {
<T> void observe(Observable<? extends T> source, T... values) {
Observer<T> o = mock(Observer.class);
- Subscription s = source.subscribe(o);
+ Subscription s = source.subscribe(new TestObserver<T>(o));
InOrder inOrder = inOrder(o);
@@ -127,7 +130,7 @@ <T> void observe(Observable<? extends T> source, T... values) {
<T> void observeSequence(Observable<? extends T> source, Iterable<? extends T> values) {
Observer<T> o = mock(Observer.class);
- Subscription s = source.subscribe(o);
+ Subscription s = source.subscribe(new TestObserver<T>(o));
InOrder inOrder = inOrder(o);
@@ -146,7 +149,7 @@ <T> void observeSequence(Observable<? extends T> source, Iterable<? extends T> v
<T> void observeError(Observable<? extends T> source, Class<? extends Throwable> error, T... valuesBeforeError) {
Observer<T> o = mock(Observer.class);
- Subscription s = source.subscribe(o);
+ Subscription s = source.subscribe(new TestObserver<T>(o));
InOrder inOrder = inOrder(o);
@@ -165,7 +168,7 @@ <T> void observeError(Observable<? extends T> source, Class<? extends Throwable>
<T> void observeSequenceError(Observable<? extends T> source, Class<? extends Throwable> error, Iterable<? extends T> valuesBeforeError) {
Observer<T> o = mock(Observer.class);
- Subscription s = source.subscribe(o);
+ Subscription s = source.subscribe(new TestObserver<T>(o));
InOrder inOrder = inOrder(o);
@@ -400,6 +403,7 @@ public Boolean call() {
@Test
public void testDoWhileManyTimes() {
+ fail("deadlocking");
Observable<Integer> source1 = Observable.from(1, 2, 3).subscribeOn(Schedulers.currentThread());
List<Integer> expected = new ArrayList<Integer>(numRecursion * 3);
|
9fcdb59352eb7bae875c460fbe6b50a4fdde3d45
|
intellij-community
|
[vcs-log] IDEA-125276 support regular- expressions in branch filter--
|
a
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/vcs-log/api/src/com/intellij/vcs/log/VcsLogBranchFilter.java b/platform/vcs-log/api/src/com/intellij/vcs/log/VcsLogBranchFilter.java
index 90464d250f41d..e3c51b3dbe181 100644
--- a/platform/vcs-log/api/src/com/intellij/vcs/log/VcsLogBranchFilter.java
+++ b/platform/vcs-log/api/src/com/intellij/vcs/log/VcsLogBranchFilter.java
@@ -16,27 +16,20 @@
package com.intellij.vcs.log;
import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
import java.util.Collection;
+import java.util.regex.Pattern;
/**
* Tells to filter by branches with given names.
- * <p/>
- * There are two filters possible here:<ul>
- * <li>accept only given branches: {@link #getBranchNames()};</li>
- * <li>deny the given branches: {@link #getExcludedBranchNames()}</li></ul>
- * Note though that accepted branch names have higher precedence over excluded ones:
- * only those commits are excluded, which are contained <b>only</b> in excluded branches:
- * i.e. if a commit contains in an excluded branch, and in a non-excluded branch, then it should be shown.
- * <p/>
- * That means, in particular, that a filter with one accepted branch will show all and only commits from that branch,
- * and excluded branches will have no effect.
*/
public interface VcsLogBranchFilter extends VcsLogFilter {
+ boolean isShown(@NotNull String name);
- @NotNull
- Collection<String> getBranchNames();
+ @Nullable
+ String getSingleFilteredBranch();
@NotNull
- Collection<String> getExcludedBranchNames();
+ Collection<String> getTextPresentation();
}
diff --git a/platform/vcs-log/impl/src/com/intellij/vcs/log/data/VcsLogBranchFilterImpl.java b/platform/vcs-log/impl/src/com/intellij/vcs/log/data/VcsLogBranchFilterImpl.java
index d47ae38cce1ec..c7fb6e18f41f6 100644
--- a/platform/vcs-log/impl/src/com/intellij/vcs/log/data/VcsLogBranchFilterImpl.java
+++ b/platform/vcs-log/impl/src/com/intellij/vcs/log/data/VcsLogBranchFilterImpl.java
@@ -1,37 +1,153 @@
package com.intellij.vcs.log.data;
import com.intellij.openapi.util.text.StringUtil;
+import com.intellij.util.Function;
+import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcs.log.VcsLogBranchFilter;
import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import java.util.ArrayList;
import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.regex.Pattern;
public class VcsLogBranchFilterImpl implements VcsLogBranchFilter {
+ @NotNull private final List<String> myBranches;
+ @NotNull private final List<Pattern> myPatterns;
- @NotNull private final Collection<String> myBranchNames;
- @NotNull private final Collection<String> myExcludedBranchNames;
+ @NotNull private final List<String> myExcludedBranches;
+ @NotNull private final List<Pattern> myExcludedPatterns;
- public VcsLogBranchFilterImpl(@NotNull final Collection<String> branchNames, @NotNull Collection<String> excludedBranchNames) {
- myBranchNames = branchNames;
- myExcludedBranchNames = excludedBranchNames;
+ private VcsLogBranchFilterImpl(@NotNull List<String> branches,
+ @NotNull List<Pattern> patterns,
+ @NotNull List<String> excludedBranches,
+ @NotNull List<Pattern> excludedPatterns) {
+ myBranches = branches;
+ myPatterns = patterns;
+ myExcludedBranches = excludedBranches;
+ myExcludedPatterns = excludedPatterns;
+ }
+
+ @Deprecated
+ public VcsLogBranchFilterImpl(@NotNull Collection<String> branches,
+ @NotNull Collection<String> excludedBranches) {
+ myBranches = new ArrayList<String>(branches);
+ myPatterns = new ArrayList<Pattern>();
+ myExcludedBranches = new ArrayList<String>(excludedBranches);
+ myExcludedPatterns = new ArrayList<Pattern>();
+ }
+
+ @Nullable
+ public static VcsLogBranchFilterImpl fromBranch(@NotNull final String branchName) {
+ return new VcsLogBranchFilterImpl(Collections.singletonList(branchName),
+ Collections.<Pattern>emptyList(),
+ Collections.<String>emptyList(),
+ Collections.<Pattern>emptyList());
+ }
+
+ @Nullable
+ public static VcsLogBranchFilterImpl fromTextPresentation(@NotNull final Collection<String> strings) {
+ if (strings.isEmpty()) return null;
+
+ List<String> branches = new ArrayList<String>();
+ List<String> excludedBranches = new ArrayList<String>();
+ List<Pattern> patterns = new ArrayList<Pattern>();
+ List<Pattern> excludedPatterns = new ArrayList<Pattern>();
+
+ for (String string : strings) {
+ boolean isRegexp = isRegexp(string);
+ boolean isExcluded = string.startsWith("-");
+ string = isExcluded ? string.substring(1) : string;
+
+ if (isRegexp) {
+ if (isExcluded) {
+ excludedPatterns.add(Pattern.compile(string));
+ }
+ else {
+ patterns.add(Pattern.compile(string));
+ }
+ }
+ else {
+ if (isExcluded) {
+ excludedBranches.add(string);
+ }
+ else {
+ branches.add(string);
+ }
+ }
+ }
+
+ return new VcsLogBranchFilterImpl(branches, patterns, excludedBranches, excludedPatterns);
+ }
+
+ @NotNull
+ @Override
+ public Collection<String> getTextPresentation() {
+ List<String> result = new ArrayList<String>();
+
+ result.addAll(myBranches);
+ result.addAll(ContainerUtil.map(myPatterns, new Function<Pattern, String>() {
+ @Override
+ public String fun(Pattern pattern) {
+ return pattern.pattern();
+ }
+ }));
+
+ result.addAll(ContainerUtil.map(myExcludedBranches, new Function<String, String>() {
+ @Override
+ public String fun(String branchName) {
+ return "-" + branchName;
+ }
+ }));
+ result.addAll(ContainerUtil.map(myExcludedPatterns, new Function<Pattern, String>() {
+ @Override
+ public String fun(Pattern pattern) {
+ return "-" + pattern.pattern();
+ }
+ }));
+
+ return result;
}
@Override
public String toString() {
- return !myBranchNames.isEmpty()
- ? "on: " + StringUtil.join(myBranchNames, ", ")
- : "not on: " + StringUtil.join(myExcludedBranchNames, ", ");
+ return "on patterns: " + StringUtil.join(myPatterns, ", ") + "; branches: " + StringUtil.join(myBranches, ", ");
}
@Override
- @NotNull
- public Collection<String> getBranchNames() {
- return myBranchNames;
+ public boolean isShown(@NotNull String name) {
+ return isIncluded(name) && !isExcluded(name);
}
- @NotNull
+ private boolean isIncluded(@NotNull String name) {
+ if (myPatterns.isEmpty() && myBranches.isEmpty()) return true;
+ if (myBranches.contains(name)) return true;
+ for (Pattern regexp : myPatterns) {
+ if (regexp.matcher(name).matches()) return true;
+ }
+ return false;
+ }
+
+ private boolean isExcluded(@NotNull String name) {
+ if (myExcludedBranches.contains(name)) return true;
+ for (Pattern regexp : myExcludedPatterns) {
+ if (regexp.matcher(name).matches()) return true;
+ }
+ return false;
+ }
+
+ @Nullable
@Override
- public Collection<String> getExcludedBranchNames() {
- return myExcludedBranchNames;
+ public String getSingleFilteredBranch() {
+ if (!myPatterns.isEmpty()) return null;
+ if (myBranches.size() != 1) return null;
+ String branch = myBranches.get(0);
+ return isExcluded(branch) ? null : branch;
+ }
+
+ private static boolean isRegexp(@NotNull String pattern) {
+ return StringUtil.containsAnyChar(pattern, "()[]{}.*?+^$\\|");
}
}
diff --git a/platform/vcs-log/impl/src/com/intellij/vcs/log/data/VisiblePackBuilder.java b/platform/vcs-log/impl/src/com/intellij/vcs/log/data/VisiblePackBuilder.java
index 786faa8e91bcd..6de95c1da4343 100644
--- a/platform/vcs-log/impl/src/com/intellij/vcs/log/data/VisiblePackBuilder.java
+++ b/platform/vcs-log/impl/src/com/intellij/vcs/log/data/VisiblePackBuilder.java
@@ -143,14 +143,11 @@ private Set<Integer> getMatchingHeads(@NotNull VcsLogRefs refs, @NotNull Set<Vir
return new HashSet<Integer>(ContainerUtil.intersection(filteredByBranch, filteredByFile));
}
- private Set<Integer> getMatchingHeads(@NotNull VcsLogRefs refs, @NotNull VcsLogBranchFilter filter) {
- final Collection<String> branchNames = new HashSet<String>(filter.getBranchNames());
- final Collection<String> excludedBranches = new HashSet<String>(filter.getExcludedBranchNames());
- final boolean filterByAcceptance = !filter.getBranchNames().isEmpty();
+ private Set<Integer> getMatchingHeads(@NotNull VcsLogRefs refs, @NotNull final VcsLogBranchFilter filter) {
return new HashSet<Integer>(ContainerUtil.mapNotNull(refs.getBranches(), new Function<VcsRef, Integer>() {
@Override
public Integer fun(@NotNull VcsRef ref) {
- boolean acceptRef = filterByAcceptance ? branchNames.contains(ref.getName()) : !excludedBranches.contains(ref.getName());
+ boolean acceptRef = filter.isShown(ref.getName());
return acceptRef ? myHashMap.getCommitIndex(ref.getCommitHash()) : null;
}
}));
diff --git a/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/CurrentBranchHighlighter.java b/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/CurrentBranchHighlighter.java
index 3b630295f1929..188fae696842b 100644
--- a/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/CurrentBranchHighlighter.java
+++ b/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/CurrentBranchHighlighter.java
@@ -17,7 +17,6 @@
import com.intellij.openapi.util.Condition;
import com.intellij.ui.JBColor;
-import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcs.log.*;
import com.intellij.vcs.log.data.LoadingDetails;
import com.intellij.vcs.log.data.VcsLogDataHolder;
@@ -61,7 +60,7 @@ public VcsCommitStyle getStyle(int commitIndex, boolean isSelected) {
}
private boolean isFilteredByCurrentBranch(@NotNull String currentBranch, @NotNull VcsLogBranchFilter branchFilter) {
- return branchFilter.getBranchNames().size() == 1 && currentBranch.equals(ContainerUtil.getFirstItem(branchFilter.getBranchNames()));
+ return currentBranch.equals(branchFilter.getSingleFilteredBranch());
}
public static class Factory implements VcsLogHighlighterFactory {
diff --git a/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/filter/BranchFilterPopupComponent.java b/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/filter/BranchFilterPopupComponent.java
index 2a85ae67add42..115bf3028d29c 100644
--- a/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/filter/BranchFilterPopupComponent.java
+++ b/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/filter/BranchFilterPopupComponent.java
@@ -42,51 +42,26 @@ public BranchFilterPopupComponent(@NotNull VcsLogUiProperties uiProperties,
@NotNull
@Override
protected String getText(@NotNull VcsLogBranchFilter filter) {
- boolean positiveMatch = !filter.getBranchNames().isEmpty();
- Collection<String> names = positiveMatch ? filter.getBranchNames() : addMinusPrefix(filter.getExcludedBranchNames());
- return displayableText(names);
+ return displayableText(getTextValues(filter));
}
@Nullable
@Override
protected String getToolTip(@NotNull VcsLogBranchFilter filter) {
- boolean positiveMatch = !filter.getBranchNames().isEmpty();
- Collection<String> names = positiveMatch ? filter.getBranchNames() : filter.getExcludedBranchNames();
- String tooltip = tooltip(names);
- return positiveMatch ? tooltip : "not in " + tooltip;
+ return tooltip(getTextValues(filter));
}
- @NotNull
+ @Nullable
@Override
protected VcsLogBranchFilter createFilter(@NotNull Collection<String> values) {
- Collection<String> acceptedBranches = ContainerUtil.newArrayList();
- Collection<String> excludedBranches = ContainerUtil.newArrayList();
- for (String value : values) {
- if (value.startsWith("-")) {
- excludedBranches.add(value.substring(1));
- }
- else {
- acceptedBranches.add(value);
- }
- }
- return new VcsLogBranchFilterImpl(acceptedBranches, excludedBranches);
+ return VcsLogBranchFilterImpl.fromTextPresentation(values);
}
@Override
@NotNull
protected Collection<String> getTextValues(@Nullable VcsLogBranchFilter filter) {
if (filter == null) return Collections.emptySet();
- return ContainerUtil.newArrayList(ContainerUtil.concat(filter.getBranchNames(), addMinusPrefix(filter.getExcludedBranchNames())));
- }
-
- @NotNull
- private static List<String> addMinusPrefix(@NotNull Collection<String> branchNames) {
- return ContainerUtil.map(branchNames, new Function<String, String>() {
- @Override
- public String fun(String branchName) {
- return "-" + branchName;
- }
- });
+ return filter.getTextPresentation();
}
@Override
diff --git a/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/filter/MultipleValueFilterPopupComponent.java b/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/filter/MultipleValueFilterPopupComponent.java
index 4b90f4bfbfd9a..795dd837dddb7 100644
--- a/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/filter/MultipleValueFilterPopupComponent.java
+++ b/platform/vcs-log/impl/src/com/intellij/vcs/log/ui/filter/MultipleValueFilterPopupComponent.java
@@ -54,7 +54,7 @@ abstract class MultipleValueFilterPopupComponent<Filter extends VcsLogFilter> ex
@NotNull
protected abstract List<String> getAllValues();
- @NotNull
+ @Nullable
protected abstract Filter createFilter(@NotNull Collection<String> values);
@NotNull
diff --git a/platform/vcs-log/impl/test/com/intellij/vcs/log/data/VisiblePackBuilderTest.kt b/platform/vcs-log/impl/test/com/intellij/vcs/log/data/VisiblePackBuilderTest.kt
index 7cd1b8b7bb308..d2dd774aa0c62 100644
--- a/platform/vcs-log/impl/test/com/intellij/vcs/log/data/VisiblePackBuilderTest.kt
+++ b/platform/vcs-log/impl/test/com/intellij/vcs/log/data/VisiblePackBuilderTest.kt
@@ -82,7 +82,7 @@ class VisiblePackBuilderTest {
3(4)
4()
}
- val visiblePack = graph.build(filters(VcsLogBranchFilterImpl(setOf(), setOf("master"))))
+ val visiblePack = graph.build(filters(VcsLogBranchFilterImpl.fromTextPresentation(setOf("-master"))))
val visibleGraph = visiblePack.getVisibleGraph()
assertEquals(3, visibleGraph.getVisibleCommitCount())
assertDoesNotContain(visibleGraph, 1)
@@ -109,7 +109,7 @@ class VisiblePackBuilderTest {
}
graph.providers.entrySet().iterator().next().getValue().setFilteredCommitsProvider(func)
- val visiblePack = graph.build(filters(VcsLogBranchFilterImpl(setOf(), setOf("master")), userFilter(DEFAULT_USER)))
+ val visiblePack = graph.build(filters(VcsLogBranchFilterImpl.fromTextPresentation(setOf("-master")), userFilter(DEFAULT_USER)))
val visibleGraph = visiblePack.getVisibleGraph()
assertEquals(3, visibleGraph.getVisibleCommitCount())
assertDoesNotContain(visibleGraph, 1)
@@ -181,7 +181,7 @@ class VisiblePackBuilderTest {
= VcsLogFilterCollectionImpl(branchFilter(branch), userFilter(user), null, null, null, null, null)
fun branchFilter(branch: List<String>?): VcsLogBranchFilterImpl? {
- return if (branch != null) VcsLogBranchFilterImpl(branch, setOf()) else null
+ return if (branch != null) VcsLogBranchFilterImpl.fromTextPresentation(branch) else null
}
fun userFilter(user: VcsUser?): VcsLogUserFilter? {
diff --git a/plugins/git4idea/src/git4idea/branch/DeepComparator.java b/plugins/git4idea/src/git4idea/branch/DeepComparator.java
index c1d3b31a5ec02..53a66eba96cc8 100644
--- a/plugins/git4idea/src/git4idea/branch/DeepComparator.java
+++ b/plugins/git4idea/src/git4idea/branch/DeepComparator.java
@@ -89,9 +89,7 @@ public void onChange(@NotNull VcsLogDataPack dataPack, boolean refreshHappened)
}
else {
VcsLogBranchFilter branchFilter = myUi.getFilterUi().getFilters().getBranchFilter();
- if (branchFilter == null ||
- branchFilter.getBranchNames().size() != 1 ||
- !branchFilter.getBranchNames().iterator().next().equals(myTask.myComparedBranch)) {
+ if (branchFilter == null || !myTask.myComparedBranch.equals(branchFilter.getSingleFilteredBranch())) {
stopAndUnhighlight();
}
}
diff --git a/plugins/git4idea/src/git4idea/branch/DeepCompareAction.java b/plugins/git4idea/src/git4idea/branch/DeepCompareAction.java
index 10f27c6160779..90e585f27ca00 100644
--- a/plugins/git4idea/src/git4idea/branch/DeepCompareAction.java
+++ b/plugins/git4idea/src/git4idea/branch/DeepCompareAction.java
@@ -64,18 +64,18 @@ public void setSelected(AnActionEvent e, boolean selected) {
final DeepComparator dc = DeepComparator.getInstance(project, ui);
if (selected) {
VcsLogBranchFilter branchFilter = ui.getFilterUi().getFilters().getBranchFilter();
- if (branchFilter == null || branchFilter.getBranchNames().size() != 1) {
+ String singleBranchName = branchFilter != null ? branchFilter.getSingleFilteredBranch() : null;
+ if (singleBranchName == null) {
selectBranchAndPerformAction(ui.getDataPack(), e, new Consumer<String>() {
@Override
public void consume(String selectedBranch) {
- ui.getFilterUi().setFilter(new VcsLogBranchFilterImpl(Collections.singleton(selectedBranch), Collections.<String>emptySet()));
+ ui.getFilterUi().setFilter(VcsLogBranchFilterImpl.fromBranch(selectedBranch));
dc.highlightInBackground(selectedBranch, dataProvider);
}
}, getAllVisibleRoots(ui));
return;
}
- String branchToCompare = branchFilter.getBranchNames().iterator().next();
- dc.highlightInBackground(branchToCompare, dataProvider);
+ dc.highlightInBackground(singleBranchName, dataProvider);
}
else {
dc.stopAndUnhighlight();
diff --git a/plugins/git4idea/src/git4idea/log/GitLogProvider.java b/plugins/git4idea/src/git4idea/log/GitLogProvider.java
index 16a42c505dfd8..99fa4f3f829dc 100644
--- a/plugins/git4idea/src/git4idea/log/GitLogProvider.java
+++ b/plugins/git4idea/src/git4idea/log/GitLogProvider.java
@@ -405,17 +405,37 @@ public List<TimedVcsCommit> getCommitsMatchingFilter(@NotNull final VirtualFile
List<String> filterParameters = ContainerUtil.newArrayList();
- if (filterCollection.getBranchFilter() != null && !filterCollection.getBranchFilter().getBranchNames().isEmpty()) {
+ VcsLogBranchFilter branchFilter = filterCollection.getBranchFilter();
+ if (branchFilter != null) {
GitRepository repository = getRepository(root);
assert repository != null : "repository is null for root " + root + " but was previously reported as 'ready'";
+ Collection<GitLocalBranch> localBranches = repository.getBranches().getLocalBranches();
+ Collection<String> localBranchNames = ContainerUtil.map(localBranches, new Function<GitLocalBranch, String>() {
+ @Override
+ public String fun(GitLocalBranch branch) {
+ return branch.getName();
+ }
+ });
+
+ Collection<GitRemoteBranch> remoteBranches = repository.getBranches().getRemoteBranches();
+ Collection<String> remoteBranchNames = ContainerUtil.map(remoteBranches, new Function<GitRemoteBranch, String>() {
+ @Override
+ public String fun(GitRemoteBranch branch) {
+ return branch.getNameForLocalOperations();
+ }
+ });
+
+ Collection<String> predefinedNames = ContainerUtil.list("HEAD");
+
boolean atLeastOneBranchExists = false;
- for (String branchName : filterCollection.getBranchFilter().getBranchNames()) {
- if (branchName.equals("HEAD") || repository.getBranches().findBranchByName(branchName) != null) {
+ for (String branchName: ContainerUtil.concat(localBranchNames, remoteBranchNames, predefinedNames)) {
+ if (branchFilter.isShown(branchName)) {
filterParameters.add(branchName);
atLeastOneBranchExists = true;
}
}
+
if (!atLeastOneBranchExists) { // no such branches in this repository => filter matches nothing
return Collections.emptyList();
}
diff --git a/plugins/git4idea/tests/git4idea/log/GitLogProviderTest.java b/plugins/git4idea/tests/git4idea/log/GitLogProviderTest.java
index 9501ddc03ce6e..6a2c5a8eff194 100644
--- a/plugins/git4idea/tests/git4idea/log/GitLogProviderTest.java
+++ b/plugins/git4idea/tests/git4idea/log/GitLogProviderTest.java
@@ -166,14 +166,14 @@ public boolean value(VcsRef ref) {
public void test_filter_by_branch() throws Exception {
List<String> hashes = generateHistoryForFilters(true);
- VcsLogBranchFilter branchFilter = new VcsLogBranchFilterImpl(singleton("feature"), Collections.<String>emptySet());
+ VcsLogBranchFilter branchFilter = VcsLogBranchFilterImpl.fromBranch("feature");
List<String> actualHashes = getFilteredHashes(branchFilter, null);
assertEquals(hashes, actualHashes);
}
public void test_filter_by_branch_and_user() throws Exception {
List<String> hashes = generateHistoryForFilters(false);
- VcsLogBranchFilter branchFilter = new VcsLogBranchFilterImpl(singleton("feature"), Collections.<String>emptySet());
+ VcsLogBranchFilter branchFilter = VcsLogBranchFilterImpl.fromBranch("feature");
VcsLogUserFilter userFilter = new VcsLogUserFilterImpl(singleton(GitTestUtil.USER_NAME), Collections.<VirtualFile, VcsUser>emptyMap(),
Collections.<VcsUser>emptySet());
List<String> actualHashes = getFilteredHashes(branchFilter, userFilter);
diff --git a/plugins/hg4idea/src/org/zmlx/hg4idea/log/HgLogProvider.java b/plugins/hg4idea/src/org/zmlx/hg4idea/log/HgLogProvider.java
index ea99a67bdd194..e1738be9552ac 100644
--- a/plugins/hg4idea/src/org/zmlx/hg4idea/log/HgLogProvider.java
+++ b/plugins/hg4idea/src/org/zmlx/hg4idea/log/HgLogProvider.java
@@ -176,31 +176,38 @@ public void update(Project project, @Nullable VirtualFile root) {
@NotNull
@Override
public List<TimedVcsCommit> getCommitsMatchingFilter(@NotNull final VirtualFile root,
- @NotNull VcsLogFilterCollection filterCollection,
- int maxCount) throws VcsException {
+ @NotNull VcsLogFilterCollection filterCollection,
+ int maxCount) throws VcsException {
List<String> filterParameters = ContainerUtil.newArrayList();
// branch filter and user filter may be used several times without delimiter
- if (filterCollection.getBranchFilter() != null && !filterCollection.getBranchFilter().getBranchNames().isEmpty()) {
+ VcsLogBranchFilter branchFilter = filterCollection.getBranchFilter();
+ if (branchFilter != null) {
HgRepository repository = myRepositoryManager.getRepositoryForRoot(root);
if (repository == null) {
LOG.error("Repository not found for root " + root);
return Collections.emptyList();
}
+ Collection<String> branchNames = repository.getBranches().keySet();
+ Collection<String> bookmarkNames = HgUtil.getNamesWithoutHashes(repository.getBookmarks());
+ Collection<String> predefinedNames = ContainerUtil.list(TIP_REFERENCE);
+
boolean atLeastOneBranchExists = false;
- for (String branchName : filterCollection.getBranchFilter().getBranchNames()) {
- if (branchName.equals(TIP_REFERENCE) || branchExists(repository, branchName)) {
+ for (String branchName : ContainerUtil.concat(branchNames, bookmarkNames, predefinedNames)) {
+ if (branchFilter.isShown(branchName)) {
filterParameters.add(HgHistoryUtil.prepareParameter("branch", branchName));
atLeastOneBranchExists = true;
}
- else if (branchName.equals(HEAD_REFERENCE)) {
- filterParameters.add(HgHistoryUtil.prepareParameter("branch", "."));
- filterParameters.add("-r");
- filterParameters.add("::."); //all ancestors for current revision;
- atLeastOneBranchExists = true;
- }
}
+
+ if (branchFilter.isShown(HEAD_REFERENCE)) {
+ filterParameters.add(HgHistoryUtil.prepareParameter("branch", "."));
+ filterParameters.add("-r");
+ filterParameters.add("::."); //all ancestors for current revision;
+ atLeastOneBranchExists = true;
+ }
+
if (!atLeastOneBranchExists) { // no such branches => filter matches nothing
return Collections.emptyList();
}
@@ -287,10 +294,4 @@ public String getCurrentBranch(@NotNull VirtualFile root) {
public <T> T getPropertyValue(VcsLogProperties.VcsLogProperty<T> property) {
return null;
}
-
- private static boolean branchExists(@NotNull HgRepository repository, @NotNull String branchName) {
- return repository.getBranches().keySet().contains(branchName) ||
- HgUtil.getNamesWithoutHashes(repository.getBookmarks()).contains(branchName);
- }
-
}
|
e3b68ea468af79dde5cd7e27f80731ec9286a45b
|
drools
|
JBRULES-737: fixing problem when inspecting classes- with static initializers--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@10190 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/util/asm/ClassFieldInspector.java b/drools-core/src/main/java/org/drools/util/asm/ClassFieldInspector.java
index d4cc8b98c7f..ff1802770c9 100644
--- a/drools-core/src/main/java/org/drools/util/asm/ClassFieldInspector.java
+++ b/drools-core/src/main/java/org/drools/util/asm/ClassFieldInspector.java
@@ -128,6 +128,7 @@ private void processClassWithoutByteCode(Class clazz,
if ( (( methods[i].getModifiers() & mask ) == Modifier.PUBLIC ) &&
( methods[i].getParameterTypes().length == 0) &&
( !methods[i].getName().equals( "<init>" )) &&
+ //( !methods[i].getName().equals( "<clinit>" )) &&
(methods[i].getReturnType() != void.class) ) {
final int fieldIndex = this.methods.size();
addToMapping( methods[i],
@@ -262,7 +263,9 @@ public MethodVisitor visitMethod(final int access,
//and have no args, and return a value
final int mask = this.includeFinalMethods ? Opcodes.ACC_PUBLIC : Opcodes.ACC_PUBLIC | Opcodes.ACC_FINAL;
if ( (access & mask) == Opcodes.ACC_PUBLIC ) {
- if ( desc.startsWith( "()" ) && !(name.equals( "<init>" )) ) {// && ( name.startsWith("get") || name.startsWith("is") ) ) {
+ if ( desc.startsWith( "()" ) &&
+ ( ! name.equals( "<init>" ) ) /*&&
+ ( ! name.equals( "<clinit>" ) ) */) {// && ( name.startsWith("get") || name.startsWith("is") ) ) {
try {
final Method method = this.clazz.getMethod( name,
(Class[]) null );
diff --git a/drools-core/src/test/java/org/drools/Cheese.java b/drools-core/src/test/java/org/drools/Cheese.java
index 36ddfd7b50d..80e98335337 100644
--- a/drools-core/src/test/java/org/drools/Cheese.java
+++ b/drools-core/src/test/java/org/drools/Cheese.java
@@ -23,6 +23,14 @@
public class Cheese
implements
CheeseInterface {
+
+ public static String staticString;
+
+ static {
+ staticString = "Cheese is tasty";
+ }
+
+
private String type;
private int price;
diff --git a/drools-core/src/test/java/org/drools/util/asm/ClassFieldInspectorTest.java b/drools-core/src/test/java/org/drools/util/asm/ClassFieldInspectorTest.java
index 4803a4b90c4..e0f2c7f6986 100644
--- a/drools-core/src/test/java/org/drools/util/asm/ClassFieldInspectorTest.java
+++ b/drools-core/src/test/java/org/drools/util/asm/ClassFieldInspectorTest.java
@@ -212,10 +212,15 @@ public void bas() {
}
static class Person {
+ public static String aStaticString;
private boolean happy;
private String name;
private int age;
private String URI;
+
+ static {
+ aStaticString = "A static String";
+ }
public int getAge() {
return this.age;
|
7ed444678012a695278f54ab86ca4a5dd73537c7
|
intellij-community
|
one more test that fails; some more code moved- to common framework--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnDeleteTest.java b/plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnDeleteTest.java
new file mode 100644
index 0000000000000..809bb1bc917da
--- /dev/null
+++ b/plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnDeleteTest.java
@@ -0,0 +1,27 @@
+package org.jetbrains.idea.svn;
+
+import com.intellij.openapi.vcs.VcsConfiguration;
+import com.intellij.openapi.vfs.VirtualFile;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * @author yole
+ */
+public class SvnDeleteTest extends SvnTestCase {
+ // IDEADEV-16066
+ @Test
+ @Ignore
+ public void testDeletePackage() throws Exception {
+ enableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
+ enableSilentOperation(VcsConfiguration.StandardConfirmation.REMOVE);
+ VirtualFile dir = createDirInCommand(myWorkingCopyDir, "child");
+ createFileInCommand(dir, "a.txt", "content");
+
+ verify(runSvn("status"), "A child", "A child\\a.txt");
+ checkin();
+
+ deleteFileInCommand(dir);
+ verify(runSvn("status"), "D child", "D child\\a.txt");
+ }
+}
\ No newline at end of file
diff --git a/plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnRenameTest.java b/plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnRenameTest.java
index 450ad487f63f1..dea7e4f5bfe66 100644
--- a/plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnRenameTest.java
+++ b/plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnRenameTest.java
@@ -2,10 +2,8 @@
import com.intellij.openapi.vcs.VcsConfiguration;
import com.intellij.openapi.vfs.VirtualFile;
-import org.junit.Test;
import org.junit.Ignore;
-
-import java.io.IOException;
+import org.junit.Test;
/**
* @author yole
@@ -21,10 +19,6 @@ public void testSimpleRename() throws Exception {
verify(runSvn("status"), "A + b.txt", "D a.txt");
}
- private void checkin() throws IOException {
- verify(runSvn("ci", "-m", "test"));
- }
-
// IDEADEV-18844
@Test
@Ignore
diff --git a/plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnTestCase.java b/plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnTestCase.java
index 7b3ec28c8f834..e70a8e54534e8 100644
--- a/plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnTestCase.java
+++ b/plugins/svn4idea/testSource/org/jetbrains/idea/svn/SvnTestCase.java
@@ -68,4 +68,8 @@ protected RunResult runSvn(String... commandLine) throws IOException {
protected void enableSilentOperation(final VcsConfiguration.StandardConfirmation op) {
enableSilentOperation(SvnVcs.VCS_NAME, op);
}
+
+ protected void checkin() throws IOException {
+ verify(runSvn("ci", "-m", "test"));
+ }
}
|
079b856a6c9ca5abbf788c9eb94535139e301cda
|
spring-framework
|
javadoc--
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.context/src/main/java/org/springframework/ui/message/DefaultMessageResolver.java b/org.springframework.context/src/main/java/org/springframework/ui/message/DefaultMessageResolver.java
index b79f3d3f2d16..7bf1eb11e1e8 100644
--- a/org.springframework.context/src/main/java/org/springframework/ui/message/DefaultMessageResolver.java
+++ b/org.springframework.context/src/main/java/org/springframework/ui/message/DefaultMessageResolver.java
@@ -95,7 +95,7 @@ public String toString() {
defaultText).toString();
}
- static class TextMessage implements Message {
+ private static class TextMessage implements Message {
private Severity severity;
@@ -116,7 +116,7 @@ public String getText() {
}
- static class MessageSourceResolvableAccessor implements PropertyAccessor {
+ private static class MessageSourceResolvableAccessor implements PropertyAccessor {
private MessageSource messageSource;
@@ -140,12 +140,11 @@ public boolean canWrite(EvaluationContext context, Object target, String name) t
return false;
}
- @SuppressWarnings("unchecked")
public void write(EvaluationContext context, Object target, String name, Object newValue) throws AccessException {
throw new UnsupportedOperationException("Should not be called");
}
- public Class[] getSpecificTargetClasses() {
+ public Class<?>[] getSpecificTargetClasses() {
return new Class[] { MessageSourceResolvable.class };
}
diff --git a/org.springframework.context/src/main/java/org/springframework/ui/message/Message.java b/org.springframework.context/src/main/java/org/springframework/ui/message/Message.java
index 48d5fbb34016..c9edd4aa7716 100644
--- a/org.springframework.context/src/main/java/org/springframework/ui/message/Message.java
+++ b/org.springframework.context/src/main/java/org/springframework/ui/message/Message.java
@@ -16,18 +16,16 @@
package org.springframework.ui.message;
/**
- * Communicates information about an event to the user.
- * For example, a validation message may inform a web application user a business rule was violated.
- * A message is attached to a receiving element, has text providing the basis for communication,
- * and has severity indicating the priority or intensity of the message for its receiver.
- *
+ * Communicates information of interest to the user.
+ * For example, a error message may inform a user of a web application a business rule was violated.
+ * TODO - should we introduce summary/detail fields instead of just text
* @author Keith Donald
*/
public interface Message {
/**
* The severity of this message.
- * The severity indicates the intensity or priority of the communication.
+ * The severity indicates the intensity or priority of the message.
* @return the message severity
*/
public Severity getSeverity();
diff --git a/org.springframework.context/src/main/java/org/springframework/ui/message/MessageBuilder.java b/org.springframework.context/src/main/java/org/springframework/ui/message/MessageBuilder.java
index 2394bddcc01c..d0c4fbeb2b02 100644
--- a/org.springframework.context/src/main/java/org/springframework/ui/message/MessageBuilder.java
+++ b/org.springframework.context/src/main/java/org/springframework/ui/message/MessageBuilder.java
@@ -22,31 +22,33 @@
import org.springframework.context.MessageSource;
import org.springframework.context.MessageSourceResolvable;
-import org.springframework.context.expression.MapAccessor;
import org.springframework.core.style.ToStringCreator;
import org.springframework.expression.ExpressionParser;
import org.springframework.expression.spel.standard.SpelExpressionParser;
-import org.springframework.expression.spel.support.StandardEvaluationContext;
/**
- * A convenient builder for building {@link MessageResolver} objects programmatically.
- * Often used by model code such as validation logic to conveniently record validation messages.
- * Supports the production of message resolvers that hard-code their message text,
- * as well as message resolvers that retrieve their text from a {@link MessageSource}.
- *
- * Usage example:
+ * A builder for building {@link MessageResolver} objects.
+ * Typically used by Controllers to {@link MessageContext#add(MessageResolver, String) add} messages to display in a user interface.
+ * Supports MessageResolvers that hard-code the message text, as well as MessageResolvers that resolve the message text from a localized {@link MessageSource}.
+ * Also supports named arguments whose values can be inserted into messages using #{eval expressions}.
* <p>
+ * Usage example:
* <pre>
* new MessageBuilder().
* severity(Severity.ERROR).
- * code("invalidFormat").
- * arg("mathForm.decimalField").
- * arg("#,###.##").
- * defaultText("The decimal field must be in format #,###.##").
+ * code("invalidFormat").
+ * resolvableArg("label", "mathForm.decimalField").
+ * arg("format", "#,###.##").
+ * defaultText("The decimal field must be in format #,###.##").
* build();
* </pre>
- * </p>
+ * Example messages.properties loaded by the MessageSource:
+ * <pre>
+ * invalidFormat=The #{label} must be in format #{format}.
+ * mathForm.decimalField=Decimal Field
+ * </pre>
* @author Keith Donald
+ * @see MessageContext#add(MessageResolver, String)
*/
public class MessageBuilder {
@@ -70,9 +72,9 @@ public MessageBuilder severity(Severity severity) {
}
/**
- * Add a message code to use to resolve the message text.
+ * Add a code to use to resolve the template for generating the localized message text.
* Successive calls to this method add additional codes.
- * Codes are applied in the order they are added.
+ * Codes are tried in the order they are added.
* @param code the message code
* @return this, for fluent API usage
*/
@@ -82,8 +84,10 @@ public MessageBuilder code(String code) {
}
/**
- * Add a message argument.
- * Successive calls to this method add additional args.
+ * Add a message argument to insert into the message text.
+ * Named message arguments are inserted by eval expressions denoted within the resolved message template.
+ * For example, the value of the 'format' argument would be inserted where a corresponding #{format} expression is defined in the message template.
+ * Successive calls to this method add additional arguments.
* @param name the argument name
* @param value the argument value
* @return this, for fluent API usage
@@ -94,14 +98,14 @@ public MessageBuilder arg(String name, Object value) {
}
/**
- * Add a message argument whose value is a resolvable message code.
- * Successive calls to this method add additional resolvable arguements.
+ * Add a message argument to insert into the message text, where the actual value to be inserted should be resolved by the {@link MessageSource}.
+ * Successive calls to this method add additional resolvable arguments.
* @param name the argument name
- * @param value the argument value
+ * @param code the code to use to resolve the argument value
* @return this, for fluent API usage
*/
- public MessageBuilder resolvableArg(String name, Object value) {
- args.put(name, new ResolvableArgumentValue(value));
+ public MessageBuilder resolvableArg(String name, Object code) {
+ args.put(name, new ResolvableArgumentValue(code));
return this;
}
@@ -135,12 +139,12 @@ public MessageResolver build() {
return new DefaultMessageResolver(severity, codesArray, args, defaultText, expressionParser);
}
- static class ResolvableArgumentValue implements MessageSourceResolvable {
+ private static class ResolvableArgumentValue implements MessageSourceResolvable {
- private Object value;
+ private Object code;
- public ResolvableArgumentValue(Object value) {
- this.value = value;
+ public ResolvableArgumentValue(Object code) {
+ this.code = code;
}
public Object[] getArguments() {
@@ -148,15 +152,15 @@ public Object[] getArguments() {
}
public String[] getCodes() {
- return new String[] { value.toString() };
+ return new String[] { code.toString() };
}
public String getDefaultMessage() {
- return String.valueOf(value);
+ return String.valueOf(code);
}
public String toString() {
- return new ToStringCreator(this).append("value", value).toString();
+ return new ToStringCreator(this).append("code", code).toString();
}
}
diff --git a/org.springframework.context/src/main/java/org/springframework/ui/message/MessageResolutionException.java b/org.springframework.context/src/main/java/org/springframework/ui/message/MessageResolutionException.java
index a8a8d9c09734..39a418575e99 100644
--- a/org.springframework.context/src/main/java/org/springframework/ui/message/MessageResolutionException.java
+++ b/org.springframework.context/src/main/java/org/springframework/ui/message/MessageResolutionException.java
@@ -1,7 +1,32 @@
+/*
+ * Copyright 2004-2009 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package org.springframework.ui.message;
+/**
+ * Runtime exception thrown by a {@link MessageResolver} if a message resolution fails.
+ * @author Keith Donald
+ */
+@SuppressWarnings("serial")
public class MessageResolutionException extends RuntimeException {
+ /**
+ * Creates a new message resolution exception.
+ * @param message a messaging describing the failure
+ * @param cause the cause of the failure
+ */
public MessageResolutionException(String message, Throwable cause) {
super(message, cause);
}
diff --git a/org.springframework.context/src/main/java/org/springframework/ui/message/MessageResolver.java b/org.springframework.context/src/main/java/org/springframework/ui/message/MessageResolver.java
index b8113562b9e7..38bebae21258 100644
--- a/org.springframework.context/src/main/java/org/springframework/ui/message/MessageResolver.java
+++ b/org.springframework.context/src/main/java/org/springframework/ui/message/MessageResolver.java
@@ -20,9 +20,7 @@
import org.springframework.context.MessageSource;
/**
- * A factory for a Message. Allows a Message to be internationalized and to be resolved from a
- * {@link MessageSource message resource bundle}.
- *
+ * A factory for a localized Message.
* @author Keith Donald
* @see Message
* @see MessageSource
@@ -30,10 +28,11 @@
public interface MessageResolver {
/**
- * Resolve the message from the message source using the current locale.
+ * Resolve the message from the message source for the locale.
* @param messageSource the message source, an abstraction for a resource bundle
- * @param locale the current locale of this request
+ * @param locale the locale of this request
* @return the resolved message
+ * @throws MessageResolutionException if a resolution failure occurs
*/
public Message resolveMessage(MessageSource messageSource, Locale locale);
}
|
d7d4a1410bffb969850bb17ba0d48b02ad542136
|
camel
|
CAMEL-1370 caching the StreamSource by caching- the inputStream or reader--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@746872 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/converter/stream/StreamCacheConverter.java b/camel-core/src/main/java/org/apache/camel/converter/stream/StreamCacheConverter.java
index a78903e66b890..c081193c4489d 100644
--- a/camel-core/src/main/java/org/apache/camel/converter/stream/StreamCacheConverter.java
+++ b/camel-core/src/main/java/org/apache/camel/converter/stream/StreamCacheConverter.java
@@ -45,18 +45,18 @@ public class StreamCacheConverter {
private XmlConverter converter = new XmlConverter();
@Converter
- public StreamCache convertToStreamCache(StreamSource source) throws TransformerException {
- return new SourceCache(converter.toString(source));
+ public StreamCache convertToStreamCache(StreamSource source) throws IOException {
+ return new StreamSourceCache(source);
}
@Converter
- public StreamCache convertToStreamCache(StringSource source) throws TransformerException {
+ public StreamCache convertToStreamCache(StringSource source) {
//no need to do stream caching for a StringSource
return null;
}
@Converter
- public StreamCache convertToStreamCache(BytesSource source) throws TransformerException {
+ public StreamCache convertToStreamCache(BytesSource source) {
//no need to do stream caching for a BytesSource
return null;
}
@@ -95,6 +95,35 @@ public void reset() {
}
}
+
+ /*
+ * {@link StreamCache} implementation for Cache the StreamSource {@link StreamSource}s
+ */
+ private class StreamSourceCache extends StreamSource implements StreamCache {
+ InputStreamCache inputStreamCache;
+ ReaderCache readCache;
+
+ public StreamSourceCache(StreamSource source) throws IOException {
+ if (source.getInputStream() != null) {
+ inputStreamCache = new InputStreamCache(IOConverter.toBytes(source.getInputStream()));
+ setInputStream(inputStreamCache);
+ setSystemId(source.getSystemId());
+ }
+ if (source.getReader() != null) {
+ readCache = new ReaderCache(IOConverter.toString(source.getReader()));
+ setReader(readCache);
+ }
+ }
+ public void reset() {
+ if (inputStreamCache != null) {
+ inputStreamCache.reset();
+ }
+ if (readCache != null) {
+ readCache.reset();
+ }
+ }
+
+ }
private class InputStreamCache extends ByteArrayInputStream implements StreamCache {
diff --git a/camel-core/src/main/java/org/apache/camel/processor/interceptor/StreamCaching.java b/camel-core/src/main/java/org/apache/camel/processor/interceptor/StreamCaching.java
index a98caa3e9c490..dc15a9ea9f748 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/interceptor/StreamCaching.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/interceptor/StreamCaching.java
@@ -54,4 +54,18 @@ public static void enable(RouteContext context) {
}
context.addInterceptStrategy(new StreamCaching());
}
+
+ /**
+ * Enable stream caching for a RouteContext
+ *
+ * @param context the route context
+ */
+ public static void disable(RouteContext context) {
+ for (InterceptStrategy strategy : context.getInterceptStrategies()) {
+ if (strategy instanceof StreamCaching) {
+ context.getInterceptStrategies().remove(strategy);
+ return;
+ }
+ }
+ }
}
diff --git a/camel-core/src/test/java/org/apache/camel/converter/stream/StreamCacheConverterTest.java b/camel-core/src/test/java/org/apache/camel/converter/stream/StreamCacheConverterTest.java
index 54dca0b1cabb4..364238d06e95b 100644
--- a/camel-core/src/test/java/org/apache/camel/converter/stream/StreamCacheConverterTest.java
+++ b/camel-core/src/test/java/org/apache/camel/converter/stream/StreamCacheConverterTest.java
@@ -20,6 +20,7 @@
import java.io.IOException;
import java.io.InputStream;
+import javax.xml.transform.Source;
import javax.xml.transform.TransformerException;
import javax.xml.transform.stream.StreamSource;
@@ -42,13 +43,14 @@ protected void setUp() throws Exception {
this.converter = new StreamCacheConverter();
}
- public void testConvertToStreamCacheStreamSource() throws TransformerException, FileNotFoundException {
+ public void testConvertToStreamCacheStreamSource() throws IOException, FileNotFoundException, TransformerException {
StreamSource source = new StreamSource(getTestFileStream());
- StreamSource cache = (StreamSource) converter.convertToStreamCache(source);
+ StreamCache cache = converter.convertToStreamCache(source);
//assert re-readability of the cached StreamSource
XmlConverter converter = new XmlConverter();
- assertNotNull(converter.toString(cache));
- assertNotNull(converter.toString(cache));
+ assertNotNull(converter.toString((Source)cache));
+ cache.reset();
+ assertNotNull(converter.toString((Source)cache));
}
public void testConvertToStreamCacheInputStream() throws IOException {
diff --git a/camel-core/src/test/java/org/apache/camel/processor/interceptor/StreamCachingInterceptorTest.java b/camel-core/src/test/java/org/apache/camel/processor/interceptor/StreamCachingInterceptorTest.java
index b610804121470..e6bc30b3b3446 100644
--- a/camel-core/src/test/java/org/apache/camel/processor/interceptor/StreamCachingInterceptorTest.java
+++ b/camel-core/src/test/java/org/apache/camel/processor/interceptor/StreamCachingInterceptorTest.java
@@ -81,7 +81,7 @@ public void testConvertStreamSourceWithRouteOnlyStreamCaching() throws Exception
template.sendBody("direct:b", message);
assertMockEndpointsSatisfied();
- assertTrue(b.assertExchangeReceived(0).getIn().getBody() instanceof StreamCache);
+ assertTrue(b.assertExchangeReceived(0).getIn().getBody() instanceof StreamCache);
assertEquals(b.assertExchangeReceived(0).getIn().getBody(String.class), MESSAGE);
}
|
52115e35af7a8361ae9e8b32499c24f9ba4174d0
|
intellij-community
|
support for forced compilation of a set of- files/modules--
|
a
|
https://github.com/JetBrains/intellij-community
|
diff --git a/java/compiler/impl/src/com/intellij/compiler/JpsServerManager.java b/java/compiler/impl/src/com/intellij/compiler/JpsServerManager.java
index f48f3078eaab9..b87426dda4105 100644
--- a/java/compiler/impl/src/com/intellij/compiler/JpsServerManager.java
+++ b/java/compiler/impl/src/com/intellij/compiler/JpsServerManager.java
@@ -192,14 +192,16 @@ public void run() {
}
@Nullable
- public RequestFuture submitCompilationTask(final String projectId, final List<String> modules, final boolean rebuild, final JpsServerResponseHandler handler) {
+ public RequestFuture submitCompilationTask(final String projectId, final boolean isRebuild, final boolean isMake, final Collection<String> modules, final Collection<String> paths, final JpsServerResponseHandler handler) {
final Ref<RequestFuture> futureRef = new Ref<RequestFuture>(null);
final RunnableFuture future = myTaskExecutor.submit(new Runnable() {
public void run() {
try {
final Client client = ensureServerRunningAndClientConnected(true);
if (client != null) {
- final RequestFuture requestFuture = client.sendCompileRequest(projectId, modules, rebuild, handler);
+ final RequestFuture requestFuture = isRebuild ?
+ client.sendRebuildRequest(projectId, handler) :
+ client.sendCompileRequest(isMake, projectId, modules, paths, handler);
futureRef.set(requestFuture);
}
else {
@@ -412,7 +414,7 @@ private Process launchServer(int port) throws ExecutionException {
// debugging
cmdLine.addParameter("-XX:+HeapDumpOnOutOfMemoryError");
- //cmdLine.addParameter("-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5008");
+ cmdLine.addParameter("-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5008");
// javac's VM should use the same default locale that IDEA uses in order for javac to print messages in 'correct' language
final String lang = System.getProperty("user.language");
diff --git a/java/compiler/impl/src/com/intellij/compiler/impl/CompileDriver.java b/java/compiler/impl/src/com/intellij/compiler/impl/CompileDriver.java
index 7cf572d4131c8..e227f523ce023 100644
--- a/java/compiler/impl/src/com/intellij/compiler/impl/CompileDriver.java
+++ b/java/compiler/impl/src/com/intellij/compiler/impl/CompileDriver.java
@@ -417,9 +417,9 @@ private void attachAnnotationProcessorsOutputDirectories(CompileContextEx contex
}
@Nullable
- private RequestFuture compileOnServer(final CompileContext compileContext, Collection<Module> modules, boolean isMake, @Nullable final CompileStatusNotification callback)
+ private RequestFuture compileOnServer(final CompileContext compileContext, Collection<Module> modules, final Collection<String> paths, @Nullable final CompileStatusNotification callback)
throws Exception {
- List<String> moduleNames = Collections.emptyList();
+ Collection<String> moduleNames = Collections.emptyList();
if (modules != null && modules.size() > 0) {
moduleNames = new ArrayList<String>(modules.size());
for (Module module : modules) {
@@ -427,7 +427,7 @@ private RequestFuture compileOnServer(final CompileContext compileContext, Colle
}
}
final JpsServerManager jpsServerManager = JpsServerManager.getInstance();
- return jpsServerManager.submitCompilationTask(myProject.getLocation(), moduleNames, !isMake, new JpsServerResponseHandlerAdapter() {
+ return jpsServerManager.submitCompilationTask(myProject.getLocation(), compileContext.isRebuild(), compileContext.isMake(), moduleNames, paths, new JpsServerResponseHandlerAdapter() {
public void handleCompileMessage(JpsRemoteProto.Message.Response.CompileMessage compilerMessage) {
final JpsRemoteProto.Message.Response.CompileMessage.Kind kind = compilerMessage.getKind();
@@ -560,7 +560,9 @@ public void run() {
if (message != null) {
compileContext.addMessage(message);
}
- final RequestFuture future = compileOnServer(compileContext, Arrays.asList(compileContext.getCompileScope().getAffectedModules()), compileContext.isMake(), callback);
+ final Collection<String> paths = fetchFiles(compileContext);
+ final List<Module> modules = paths.isEmpty()? Arrays.asList(compileContext.getCompileScope().getAffectedModules()) : Collections.<Module>emptyList();
+ final RequestFuture future = compileOnServer(compileContext, modules, paths, callback);
if (future != null) {
// start cancel watcher
ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
@@ -675,6 +677,32 @@ public void run() {
});
}
+ private static List<String> fetchFiles(CompileContextImpl context) {
+ if (context.isRebuild()) {
+ return Collections.emptyList();
+ }
+ final CompileScope scope = context.getCompileScope();
+ if (shouldFetchFiles(scope)) {
+ final List<String> paths = new ArrayList<String>();
+ for (VirtualFile file : scope.getFiles(null, true)) {
+ paths.add(file.getPath());
+ }
+ return paths;
+ }
+ return Collections.emptyList();
+ }
+
+ private static boolean shouldFetchFiles(CompileScope scope) {
+ if (scope instanceof CompositeScope) {
+ for (CompileScope compileScope : ((CompositeScope)scope).getScopes()) {
+ if (shouldFetchFiles(compileScope)) {
+ return true;
+ }
+ }
+ }
+ return scope instanceof OneProjectItemCompileScope || scope instanceof FileSetCompileScope;
+ }
+
private void doCompile(final CompileContextImpl compileContext,
final boolean isRebuild,
final boolean forceCompile,
diff --git a/jps/jps-builders/proto/jps_remote_proto.proto b/jps/jps-builders/proto/jps_remote_proto.proto
index 566e4ef09c6eb..f0f9d21bb1d57 100644
--- a/jps/jps-builders/proto/jps_remote_proto.proto
+++ b/jps/jps-builders/proto/jps_remote_proto.proto
@@ -42,6 +42,7 @@ message Message {
required Type command_type = 1;
optional string project_id = 2;
repeated string module_name = 3;
+ repeated string file_path = 4;
}
message ShutdownCommand {
diff --git a/jps/jps-builders/src/org/jetbrains/jps/api/JpsRemoteProto.java b/jps/jps-builders/src/org/jetbrains/jps/api/JpsRemoteProto.java
index 5d3dd9839192f..4279a42789930 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/api/JpsRemoteProto.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/api/JpsRemoteProto.java
@@ -845,6 +845,18 @@ public java.lang.String getModuleName(int index) {
return moduleName_.get(index);
}
+ // repeated string file_path = 4;
+ public static final int FILE_PATH_FIELD_NUMBER = 4;
+ private java.util.List<java.lang.String> filePath_ =
+ java.util.Collections.emptyList();
+ public java.util.List<java.lang.String> getFilePathList() {
+ return filePath_;
+ }
+ public int getFilePathCount() { return filePath_.size(); }
+ public java.lang.String getFilePath(int index) {
+ return filePath_.get(index);
+ }
+
private void initFields() {
commandType_ = org.jetbrains.jps.api.JpsRemoteProto.Message.Request.CompilationRequest.Type.REBUILD;
}
@@ -865,6 +877,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
for (java.lang.String element : getModuleNameList()) {
output.writeString(3, element);
}
+ for (java.lang.String element : getFilePathList()) {
+ output.writeString(4, element);
+ }
}
private int memoizedSerializedSize = -1;
@@ -890,6 +905,15 @@ public int getSerializedSize() {
size += dataSize;
size += 1 * getModuleNameList().size();
}
+ {
+ int dataSize = 0;
+ for (java.lang.String element : getFilePathList()) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeStringSizeNoTag(element);
+ }
+ size += dataSize;
+ size += 1 * getFilePathList().size();
+ }
memoizedSerializedSize = size;
return size;
}
@@ -1031,6 +1055,10 @@ public org.jetbrains.jps.api.JpsRemoteProto.Message.Request.CompilationRequest b
result.moduleName_ =
java.util.Collections.unmodifiableList(result.moduleName_);
}
+ if (result.filePath_ != java.util.Collections.EMPTY_LIST) {
+ result.filePath_ =
+ java.util.Collections.unmodifiableList(result.filePath_);
+ }
org.jetbrains.jps.api.JpsRemoteProto.Message.Request.CompilationRequest returnMe = result;
result = null;
return returnMe;
@@ -1050,6 +1078,12 @@ public Builder mergeFrom(org.jetbrains.jps.api.JpsRemoteProto.Message.Request.Co
}
result.moduleName_.addAll(other.moduleName_);
}
+ if (!other.filePath_.isEmpty()) {
+ if (result.filePath_.isEmpty()) {
+ result.filePath_ = new java.util.ArrayList<java.lang.String>();
+ }
+ result.filePath_.addAll(other.filePath_);
+ }
return this;
}
@@ -1084,6 +1118,10 @@ public Builder mergeFrom(
addModuleName(input.readString());
break;
}
+ case 34: {
+ addFilePath(input.readString());
+ break;
+ }
}
}
}
@@ -1171,6 +1209,46 @@ public Builder clearModuleName() {
return this;
}
+ // repeated string file_path = 4;
+ public java.util.List<java.lang.String> getFilePathList() {
+ return java.util.Collections.unmodifiableList(result.filePath_);
+ }
+ public int getFilePathCount() {
+ return result.getFilePathCount();
+ }
+ public java.lang.String getFilePath(int index) {
+ return result.getFilePath(index);
+ }
+ public Builder setFilePath(int index, java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ result.filePath_.set(index, value);
+ return this;
+ }
+ public Builder addFilePath(java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ if (result.filePath_.isEmpty()) {
+ result.filePath_ = new java.util.ArrayList<java.lang.String>();
+ }
+ result.filePath_.add(value);
+ return this;
+ }
+ public Builder addAllFilePath(
+ java.lang.Iterable<? extends java.lang.String> values) {
+ if (result.filePath_.isEmpty()) {
+ result.filePath_ = new java.util.ArrayList<java.lang.String>();
+ }
+ super.addAll(values, result.filePath_);
+ return this;
+ }
+ public Builder clearFilePath() {
+ result.filePath_ = java.util.Collections.emptyList();
+ return this;
+ }
+
// @@protoc_insertion_point(builder_scope:org.jetbrains.jpsservice.Message.Request.CompilationRequest)
}
diff --git a/jps/jps-builders/src/org/jetbrains/jps/api/ProtoUtil.java b/jps/jps-builders/src/org/jetbrains/jps/api/ProtoUtil.java
index b41c448084f72..c9f48b3517675 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/api/ProtoUtil.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/api/ProtoUtil.java
@@ -6,10 +6,7 @@
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
+import java.util.*;
/**
* @author Eugene Zhuravlev
@@ -32,15 +29,19 @@ public static JpsRemoteProto.Message.Failure createFailure(final String descript
}
public static JpsRemoteProto.Message.Request createMakeRequest(String project, Collection<String> modules) {
- return createCompileRequest(JpsRemoteProto.Message.Request.CompilationRequest.Type.MAKE, project, modules);
+ return createCompileRequest(JpsRemoteProto.Message.Request.CompilationRequest.Type.MAKE, project, modules, Collections.<String>emptyList());
}
- public static JpsRemoteProto.Message.Request createRebuildRequest(String project, Collection<String> modules) {
- return createCompileRequest(JpsRemoteProto.Message.Request.CompilationRequest.Type.REBUILD, project, modules);
+ public static JpsRemoteProto.Message.Request createForceCompileRequest(String project, Collection<String> modules, Collection<String> paths) {
+ return createCompileRequest(JpsRemoteProto.Message.Request.CompilationRequest.Type.FORCED_COMPILATION, project, modules, paths);
+ }
+
+ public static JpsRemoteProto.Message.Request createRebuildRequest(String project) {
+ return createCompileRequest(JpsRemoteProto.Message.Request.CompilationRequest.Type.REBUILD, project, Collections.<String>emptyList(), Collections.<String>emptyList());
}
public static JpsRemoteProto.Message.Request createCleanRequest(String project, Collection<String> modules) {
- return createCompileRequest(JpsRemoteProto.Message.Request.CompilationRequest.Type.CLEAN, project, modules);
+ return createCompileRequest(JpsRemoteProto.Message.Request.CompilationRequest.Type.CLEAN, project, modules, Collections.<String>emptyList());
}
public static JpsRemoteProto.Message.Request createCancelRequest(UUID compileSessionId) {
@@ -49,13 +50,16 @@ public static JpsRemoteProto.Message.Request createCancelRequest(UUID compileSes
return JpsRemoteProto.Message.Request.newBuilder().setRequestType(JpsRemoteProto.Message.Request.Type.CANCEL_BUILD_COMMAND).setCancelBuildCommand(builder.build()).build();
}
- public static JpsRemoteProto.Message.Request createCompileRequest(final JpsRemoteProto.Message.Request.CompilationRequest.Type command, String project, Collection<String> modules) {
+ public static JpsRemoteProto.Message.Request createCompileRequest(final JpsRemoteProto.Message.Request.CompilationRequest.Type command, String project, Collection<String> modules, Collection<String> paths) {
final JpsRemoteProto.Message.Request.CompilationRequest.Builder builder = JpsRemoteProto.Message.Request.CompilationRequest.newBuilder().setCommandType(
command);
builder.setProjectId(project);
if (modules.size() > 0) {
builder.addAllModuleName(modules);
}
+ if (paths.size() > 0) {
+ builder.addAllFilePath(paths);
+ }
return JpsRemoteProto.Message.Request.newBuilder().setRequestType(JpsRemoteProto.Message.Request.Type.COMPILE_REQUEST).setCompileRequest(
builder.build()).build();
}
diff --git a/jps/jps-builders/src/org/jetbrains/jps/client/Client.java b/jps/jps-builders/src/org/jetbrains/jps/client/Client.java
index 563623c944bad..ff3927a00fc9b 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/client/Client.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/client/Client.java
@@ -87,12 +87,18 @@ public void channelClosed(ChannelHandlerContext ctx, ChannelStateEvent e) throws
}
@NotNull
- public RequestFuture sendCompileRequest(String projectId, List<String> modules, boolean rebuild, JpsServerResponseHandler handler) throws Exception{
+ public RequestFuture sendCompileRequest(boolean isMake, String projectId, Collection<String> modules, Collection<String> paths, JpsServerResponseHandler handler) throws Exception{
checkConnected();
- return sendRequest(
- rebuild? ProtoUtil.createRebuildRequest(projectId, modules) : ProtoUtil.createMakeRequest(projectId, modules),
- handler
- );
+ final JpsRemoteProto.Message.Request request = isMake?
+ ProtoUtil.createMakeRequest(projectId, modules) :
+ ProtoUtil.createForceCompileRequest(projectId, modules, paths);
+ return sendRequest(request, handler);
+ }
+
+ @NotNull
+ public RequestFuture sendRebuildRequest(String projectId, JpsServerResponseHandler handler) throws Exception{
+ checkConnected();
+ return sendRequest(ProtoUtil.createRebuildRequest(projectId), handler);
}
@NotNull
diff --git a/jps/jps-builders/src/org/jetbrains/jps/incremental/AllProjectScope.java b/jps/jps-builders/src/org/jetbrains/jps/incremental/AllProjectScope.java
new file mode 100644
index 0000000000000..43d7a6b42903d
--- /dev/null
+++ b/jps/jps-builders/src/org/jetbrains/jps/incremental/AllProjectScope.java
@@ -0,0 +1,34 @@
+package org.jetbrains.jps.incremental;
+
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.jps.Module;
+import org.jetbrains.jps.Project;
+
+import java.io.File;
+
+/**
+ * @author Eugene Zhuravlev
+ * Date: 9/17/11
+ */
+public class AllProjectScope extends CompileScope {
+
+ private final boolean myIsForcedCompilation;
+
+ public AllProjectScope(Project project, boolean forcedCompilation) {
+ super(project);
+ myIsForcedCompilation = forcedCompilation;
+ }
+
+ public boolean isRecompilationForced(@NotNull Module module) {
+ return myIsForcedCompilation;
+ }
+
+ public boolean isAffected(@NotNull Module module) {
+ return true;
+ }
+
+ public boolean isAffected(Module module, @NotNull File file) {
+ return true;
+ }
+
+}
diff --git a/jps/jps-builders/src/org/jetbrains/jps/incremental/Builder.java b/jps/jps-builders/src/org/jetbrains/jps/incremental/Builder.java
index d52217717e6a9..79ded2d232588 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/incremental/Builder.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/incremental/Builder.java
@@ -82,7 +82,7 @@ public final boolean updateMappings(CompileContext context, final Mappings delta
}
else {
additionalPassRequired = context.isMake();
- context.markDirty(chunk);
+ context.markDirtyRecursively(chunk);
}
}
diff --git a/jps/jps-builders/src/org/jetbrains/jps/incremental/CompileContext.java b/jps/jps-builders/src/org/jetbrains/jps/incremental/CompileContext.java
index 57a9972ed5e21..d7b4fc88b2185 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/incremental/CompileContext.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/incremental/CompileContext.java
@@ -83,7 +83,7 @@ public void markDirty(final File file) throws Exception {
}
}
- public void markDirty(ModuleChunk chunk) throws Exception {
+ public void markDirtyRecursively(ModuleChunk chunk) throws Exception {
final Set<Module> modules = chunk.getModules();
final Set<Module> dirtyModules = new HashSet<Module>(modules);
@@ -147,7 +147,7 @@ void onChunkBuildComplete(@NotNull ModuleChunk chunk) throws Exception {
final List<RootDescriptor> roots = myRootsIndex.getModuleRoots(module);
for (RootDescriptor descriptor : roots) {
if (compilingTests? descriptor.isTestRoot : !descriptor.isTestRoot) {
- myFsState.markAllUpToDate(descriptor, myTsStorage, myCompilationStartStamp);
+ myFsState.markAllUpToDate(getScope(), descriptor, myTsStorage, myCompilationStartStamp);
}
}
}
@@ -178,7 +178,7 @@ public void processMessage(BuildMessage msg) {
public void processFilesToRecompile(ModuleChunk chunk, FileProcessor processor) throws Exception {
for (Module module : chunk.getModules()) {
- myFsState.processFilesToRecompile(module, isCompilingTests(), processor);
+ myFsState.processFilesToRecompile(this, module, processor);
}
}
@@ -188,10 +188,16 @@ final void ensureFSStateInitialized(ModuleChunk chunk) throws Exception {
markDirtyFiles(module, myTsStorage, true, isCompilingTests() ? DirtyMarkScope.TESTS : DirtyMarkScope.PRODUCTION, null);
}
else {
- // in 'make' mode
- // todo: consider situation when only several files are forced to be compiled => this is not project rebuild and not make
- if (myFsState.markInitialScanPerformed(module, isCompilingTests())) {
- initModuleFSState(module);
+ if (isMake()) {
+ if (myFsState.markInitialScanPerformed(module, isCompilingTests())) {
+ initModuleFSState(module);
+ }
+ }
+ else {
+ // forced compilation mode
+ if (getScope().isRecompilationForced(module)) {
+ markDirtyFiles(module, myTsStorage, true, isCompilingTests() ? DirtyMarkScope.TESTS : DirtyMarkScope.PRODUCTION, null);
+ }
}
}
}
@@ -233,7 +239,7 @@ public void setDone(float done) {
processMessage(new ProgressMessage("", done));
}
- private static enum DirtyMarkScope{
+ public static enum DirtyMarkScope{
PRODUCTION, TESTS, BOTH
}
diff --git a/jps/jps-builders/src/org/jetbrains/jps/incremental/CompileScope.java b/jps/jps-builders/src/org/jetbrains/jps/incremental/CompileScope.java
index 6b42d3c0552e0..d1f439c6eb438 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/incremental/CompileScope.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/incremental/CompileScope.java
@@ -1,46 +1,42 @@
package org.jetbrains.jps.incremental;
+import org.jetbrains.annotations.NotNull;
import org.jetbrains.jps.Module;
import org.jetbrains.jps.ModuleChunk;
import org.jetbrains.jps.Project;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Set;
+import java.io.File;
/**
* @author Eugene Zhuravlev
- * Date: 9/17/11
+ * Date: 1/15/12
*/
-public class CompileScope {
+public abstract class CompileScope {
+ @NotNull
private final Project myProject;
- private final Collection<Module> myModules;
- public CompileScope(Project project) {
- this(project, project.getModules().values());
- }
-
- public CompileScope(Project project, Collection<Module> modules) {
+ protected CompileScope(@NotNull Project project) {
myProject = project;
- myModules = modules;
}
- public Collection<Module> getAffectedModules() {
- return Collections.unmodifiableCollection(myModules);
- }
+ public abstract boolean isAffected(Module module, @NotNull File file);
+
+ public abstract boolean isAffected(@NotNull Module module);
+
+ public abstract boolean isRecompilationForced(@NotNull Module module);
- public boolean isAffected(ModuleChunk chunk) {
- final Set<Module> modules = chunk.getModules();
- for (Module module : getAffectedModules()) {
- if (modules.contains(module)) {
+ public final boolean isAffected(ModuleChunk chunk) {
+ for (Module module : chunk.getModules()) {
+ if (isAffected(module)) {
return true;
}
}
return false;
}
- public Project getProject() {
+ @NotNull
+ public final Project getProject() {
return myProject;
}
}
diff --git a/jps/jps-builders/src/org/jetbrains/jps/incremental/FSState.java b/jps/jps-builders/src/org/jetbrains/jps/incremental/FSState.java
index 950265a6950cb..cc89a8d71cd13 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/incremental/FSState.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/incremental/FSState.java
@@ -72,21 +72,26 @@ public void markDirty(final File file, final RootDescriptor rd, final @Nullable
}
}
- public void markAllUpToDate(final RootDescriptor rd, final TimestampStorage tsStorage, final long compilationStartStamp) throws Exception {
+ public void markAllUpToDate(CompileScope scope, final RootDescriptor rd, final TimestampStorage tsStorage, final long compilationStartStamp) throws Exception {
final FilesDelta delta = getDelta(rd.module);
final Set<File> files = delta.clearRecompile(rd.root, rd.isTestRoot);
if (files != null) {
final CompilerExcludes excludes = rd.module.getProject().getCompilerConfiguration().getExcludes();
for (File file : files) {
if (!excludes.isExcluded(file)) {
- final long stamp = file.lastModified();
- if (stamp > compilationStartStamp) {
- // if the file was modified after the compilation had started,
- // do not save the stamp considering file dirty
- delta.markRecompile(rd.root, rd.isTestRoot, file);
+ if (scope.isAffected(rd.module, file)) {
+ final long stamp = file.lastModified();
+ if (stamp > compilationStartStamp) {
+ // if the file was modified after the compilation had started,
+ // do not save the stamp considering file dirty
+ delta.markRecompile(rd.root, rd.isTestRoot, file);
+ }
+ else {
+ tsStorage.saveStamp(file, stamp);
+ }
}
else {
- tsStorage.saveStamp(file, stamp);
+ delta.markRecompile(rd.root, rd.isTestRoot, file);
}
}
else {
@@ -96,16 +101,19 @@ public void markAllUpToDate(final RootDescriptor rd, final TimestampStorage tsSt
}
}
-
- public boolean processFilesToRecompile(final Module module, final boolean forTests, final FileProcessor processor) throws Exception {
+ public boolean processFilesToRecompile(CompileContext context, final Module module, final FileProcessor processor) throws Exception {
final FilesDelta lastRoundDelta = myLastRoundDelta;
final FilesDelta delta = lastRoundDelta != null? lastRoundDelta : getDelta(module);
- final Map<File, Set<File>> data = delta.getSourcesToRecompile(forTests);
+ final Map<File, Set<File>> data = delta.getSourcesToRecompile(context.isCompilingTests());
final CompilerExcludes excludes = module.getProject().getCompilerConfiguration().getExcludes();
+ final CompileScope scope = context.getScope();
synchronized (data) {
for (Map.Entry<File, Set<File>> entry : data.entrySet()) {
final String root = FileUtil.toSystemIndependentName(entry.getKey().getPath());
for (File file : entry.getValue()) {
+ if (!scope.isAffected(module, file)) {
+ continue;
+ }
if (excludes.isExcluded(file)) {
continue;
}
diff --git a/jps/jps-builders/src/org/jetbrains/jps/incremental/IncProjectBuilder.java b/jps/jps-builders/src/org/jetbrains/jps/incremental/IncProjectBuilder.java
index c7ac05d9ebc93..23ec5ee1e5c34 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/incremental/IncProjectBuilder.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/incremental/IncProjectBuilder.java
@@ -71,7 +71,7 @@ public void build(CompileScope scope, final boolean isMake, final boolean isProj
JPS_SERVER_NAME, BuildMessage.Kind.INFO,
"Internal caches are corrupted or have outdated format, forcing project rebuild: " + e.getMessage())
);
- context = createContext(new CompileScope(scope.getProject()), false, true);
+ context = createContext(new AllProjectScope(scope.getProject(), true), false, true);
runBuild(context);
}
else {
diff --git a/jps/jps-builders/src/org/jetbrains/jps/incremental/ModulesAndFilesScope.java b/jps/jps-builders/src/org/jetbrains/jps/incremental/ModulesAndFilesScope.java
new file mode 100644
index 0000000000000..380c0daadf1f3
--- /dev/null
+++ b/jps/jps-builders/src/org/jetbrains/jps/incremental/ModulesAndFilesScope.java
@@ -0,0 +1,49 @@
+package org.jetbrains.jps.incremental;
+
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.jps.Module;
+import org.jetbrains.jps.Project;
+
+import java.io.File;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * @author Eugene Zhuravlev
+ * Date: 9/17/11
+ */
+public class ModulesAndFilesScope extends CompileScope {
+
+ private final Set<Module> myModules;
+ private final Map<Module, Set<File>> myFiles;
+ private final boolean myForcedCompilation;
+
+ public ModulesAndFilesScope(Project project, Collection<Module> modules, Map<Module, Set<File>> files, boolean isForcedCompilation) {
+ super(project);
+ myFiles = files;
+ myForcedCompilation = isForcedCompilation;
+ myModules = new HashSet<Module>(modules);
+ }
+
+ public boolean isRecompilationForced(@NotNull Module module) {
+ return myForcedCompilation && myModules.contains(module);
+ }
+
+ public boolean isAffected(@NotNull Module module) {
+ if (myModules.contains(module) || myFiles.containsKey(module)) {
+ return true;
+ }
+ return false;
+ }
+
+ public boolean isAffected(Module module, @NotNull File file) {
+ if (myModules.contains(module)) {
+ return true;
+ }
+ final Set<File> files = myFiles.get(module);
+ return files != null && files.contains(file);
+ }
+
+}
diff --git a/jps/jps-builders/src/org/jetbrains/jps/incremental/ModulesScope.java b/jps/jps-builders/src/org/jetbrains/jps/incremental/ModulesScope.java
new file mode 100644
index 0000000000000..56ffec4d7f465
--- /dev/null
+++ b/jps/jps-builders/src/org/jetbrains/jps/incremental/ModulesScope.java
@@ -0,0 +1,37 @@
+package org.jetbrains.jps.incremental;
+
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.jps.Module;
+import org.jetbrains.jps.Project;
+
+import java.io.File;
+import java.util.Set;
+
+/**
+ * @author Eugene Zhuravlev
+ * Date: 9/17/11
+ */
+public class ModulesScope extends CompileScope {
+
+ private final Set<Module> myModules;
+ private final boolean myForcedCompilation;
+
+ public ModulesScope(Project project, Set<Module> modules, boolean isForcedCompilation) {
+ super(project);
+ myModules = modules;
+ myForcedCompilation = isForcedCompilation;
+ }
+
+ public boolean isRecompilationForced(@NotNull Module module) {
+ return myForcedCompilation && isAffected(module);
+ }
+
+ public boolean isAffected(@NotNull Module module) {
+ return myModules.contains(module);
+ }
+
+ public boolean isAffected(Module module, @NotNull File file) {
+ return true; // for speed reasons
+ }
+
+}
diff --git a/jps/jps-builders/src/org/jetbrains/jps/server/ServerMessageHandler.java b/jps/jps-builders/src/org/jetbrains/jps/server/ServerMessageHandler.java
index e137a682165aa..6a4c6f28683e9 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/server/ServerMessageHandler.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/server/ServerMessageHandler.java
@@ -137,7 +137,7 @@ private JpsRemoteProto.Message startBuild(UUID sessionId, final ChannelHandlerCo
case MAKE:
case FORCED_COMPILATION:
case REBUILD: {
- final CompilationTask task = new CompilationTask(sessionId, channelContext, projectId, compileRequest.getModuleNameList());
+ final CompilationTask task = new CompilationTask(sessionId, channelContext, projectId, compileRequest.getModuleNameList(), compileRequest.getFilePathList());
if (myBuildsInProgress.putIfAbsent(projectId, task) == null) {
task.getBuildParams().buildType = convertCompileType(compileType);
task.getBuildParams().useInProcessJavac = true;
@@ -166,14 +166,16 @@ private class CompilationTask implements Runnable, BuildCanceledStatus {
private final UUID mySessionId;
private final ChannelHandlerContext myChannelContext;
private final String myProjectPath;
+ private final Collection<String> myPaths;
private final Set<String> myModules;
private final BuildParameters myParams;
private volatile boolean myCanceled = false;
- public CompilationTask(UUID sessionId, ChannelHandlerContext channelContext, String projectId, List<String> modules) {
+ public CompilationTask(UUID sessionId, ChannelHandlerContext channelContext, String projectId, Collection<String> modules, Collection<String> paths) {
mySessionId = sessionId;
myChannelContext = channelContext;
myProjectPath = projectId;
+ myPaths = paths;
myModules = new HashSet<String>(modules);
myParams = new BuildParameters();
}
@@ -194,7 +196,7 @@ public void run() {
Channels.write(myChannelContext.getChannel(), ProtoUtil.toMessage(mySessionId, ProtoUtil.createBuildStartedEvent("build started")));
Throwable error = null;
try {
- ServerState.getInstance().startBuild(myProjectPath, myModules, myParams, new MessageHandler() {
+ ServerState.getInstance().startBuild(myProjectPath, myModules, myPaths, myParams, new MessageHandler() {
public void processMessage(BuildMessage buildMessage) {
final JpsRemoteProto.Message.Response response;
if (buildMessage instanceof CompilerMessage) {
diff --git a/jps/jps-builders/src/org/jetbrains/jps/server/ServerState.java b/jps/jps-builders/src/org/jetbrains/jps/server/ServerState.java
index bfce4ceabeba1..b9d4999d5afe2 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/server/ServerState.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/server/ServerState.java
@@ -14,6 +14,7 @@
import org.jetbrains.jps.idea.IdeaProjectLoader;
import org.jetbrains.jps.incremental.*;
import org.jetbrains.jps.incremental.storage.ProjectTimestamps;
+import org.jetbrains.jps.incremental.storage.TimestampStorage;
import java.io.File;
import java.lang.reflect.Method;
@@ -96,7 +97,7 @@ public void clearProjectCache(Collection<String> projectPaths) {
}
}
- public void startBuild(String projectPath, Set<String> modules, final BuildParameters params, final MessageHandler msgHandler, BuildCanceledStatus cs) throws Throwable{
+ public void startBuild(String projectPath, Set<String> modules, Collection<String> paths, final BuildParameters params, final MessageHandler msgHandler, BuildCanceledStatus cs) throws Throwable{
final String projectName = getProjectName(projectPath);
BuildType buildType = params.buildType;
@@ -115,20 +116,7 @@ public void startBuild(String projectPath, Set<String> modules, final BuildParam
final Project project = pd.project;
try {
- final List<Module> toCompile = new ArrayList<Module>();
- if (modules != null && modules.size() > 0) {
- for (Module m : project.getModules().values()) {
- if (modules.contains(m.getName())){
- toCompile.add(m);
- }
- }
- }
- else {
- toCompile.addAll(project.getModules().values());
- }
-
- final CompileScope compileScope = new CompileScope(project, toCompile);
-
+ final CompileScope compileScope = createCompilationScope(buildType, pd, modules, paths);
final IncProjectBuilder builder = new IncProjectBuilder(pd, BuilderRegistry.getInstance(), cs);
if (msgHandler != null) {
builder.addMessageHandler(msgHandler);
@@ -158,6 +146,60 @@ public void startBuild(String projectPath, Set<String> modules, final BuildParam
}
}
+ private static CompileScope createCompilationScope(BuildType buildType, ProjectDescriptor pd, Set<String> modules, Collection<String> paths) throws Exception {
+ final CompileScope compileScope;
+ if (buildType == BuildType.PROJECT_REBUILD || (modules.isEmpty() && paths.isEmpty())) {
+ compileScope = new AllProjectScope(pd.project, buildType != BuildType.MAKE);
+ }
+ else {
+ final Set<Module> forcedModules;
+ if (!modules.isEmpty()) {
+ forcedModules = new HashSet<Module>();
+ for (Module m : pd.project.getModules().values()) {
+ if (modules.contains(m.getName())){
+ forcedModules.add(m);
+ }
+ }
+ }
+ else {
+ forcedModules = Collections.emptySet();
+ }
+
+ final TimestampStorage tsStorage = pd.timestamps.getStorage();
+
+ final Map<Module, Set<File>> filesToCompile;
+ if (!paths.isEmpty()) {
+ filesToCompile = new HashMap<Module, Set<File>>();
+ for (String path : paths) {
+ final File file = new File(path);
+ final RootDescriptor rd = pd.rootsIndex.getModuleAndRoot(file);
+ if (rd != null) {
+ Set<File> files = filesToCompile.get(rd.module);
+ if (files == null) {
+ files = new HashSet<File>();
+ filesToCompile.put(rd.module, files);
+ }
+ files.add(file);
+ if (buildType == BuildType.FORCED_COMPILATION) {
+ pd.fsState.markDirty(file, rd, tsStorage);
+ }
+ }
+ }
+ }
+ else {
+ filesToCompile = Collections.emptyMap();
+ }
+
+ if (filesToCompile.isEmpty()) {
+ compileScope = new ModulesScope(pd.project, forcedModules, buildType != BuildType.MAKE);
+ }
+ else {
+ compileScope = new ModulesAndFilesScope(pd.project, forcedModules, filesToCompile, buildType != BuildType.MAKE);
+ }
+ }
+ return compileScope;
+ }
+
private static void clearZipIndexCache() {
try {
final Class<?> indexClass = Class.forName("com.sun.tools.javac.zip.ZipFileIndex");
|
359a2756e623e605aaf29a1f3c7181666fae775c
|
orientdb
|
Improved automatic backup management of errors--
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseWrapperAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseWrapperAbstract.java
index b2f87de61ad..1793a65f43c 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseWrapperAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseWrapperAbstract.java
@@ -306,7 +306,7 @@ public void unregisterListener(final ODatabaseListener iListener) {
underlying.unregisterListener(iListener);
}
- public <V> V callInLock(Callable<V> iCallable, boolean iExclusiveLock) {
+ public <V> V callInLock(final Callable<V> iCallable, final boolean iExclusiveLock) {
return getStorage().callInLock(iCallable, iExclusiveLock);
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OClusterLocalLHPEPS.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OClusterLocalLHPEPS.java
index e310b14a2db..bcdc773873e 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OClusterLocalLHPEPS.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OClusterLocalLHPEPS.java
@@ -348,7 +348,7 @@ private void initState() {
public void truncate() throws IOException {
storage.checkForClusterPermissions(getName());
-
+
acquireExclusiveLock();
try {
long localSize = size;
@@ -451,6 +451,11 @@ public boolean addPhysicalPosition(OPhysicalPosition iPPosition) throws IOExcept
}
}
+ @Override
+ public String toString() {
+ return name;
+ }
+
public OPhysicalPosition getPhysicalPosition(OPhysicalPosition iPPosition) throws IOException {
acquireSharedLock();
try {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
index a9f44d8614f..3998de6139a 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
@@ -1451,10 +1451,13 @@ protected int registerDataSegment(final OStorageDataConfiguration iConfig) throw
private int createClusterFromConfig(final OStorageClusterConfiguration iConfig) throws IOException {
OCluster cluster = clusterMap.get(iConfig.getName());
- if (cluster != null) {
- if (cluster instanceof OClusterLocal)
+ if (cluster instanceof OClusterLocal && iConfig instanceof OStoragePhysicalClusterLHPEPSConfiguration)
+ clusterMap.remove(iConfig.getName());
+ else if (cluster != null) {
+ if (cluster instanceof OClusterLocal) {
// ALREADY CONFIGURED, JUST OVERWRITE CONFIG
((OClusterLocal) cluster).configure(this, iConfig);
+ }
return -1;
}
|
76517d9ac96d3f7f9475743c539e2b9efbd0d0f0
|
ReactiveX-RxJava
|
1.x: fix Completable.onErrorComplete(Func1) not- relaying function crash (-4027)--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/src/main/java/rx/Completable.java b/src/main/java/rx/Completable.java
index b5f9baebb7..69c0df708d 100644
--- a/src/main/java/rx/Completable.java
+++ b/src/main/java/rx/Completable.java
@@ -1663,8 +1663,9 @@ public void onError(Throwable e) {
try {
b = predicate.call(e);
} catch (Throwable ex) {
+ Exceptions.throwIfFatal(ex);
e = new CompositeException(Arrays.asList(e, ex));
- return;
+ b = false;
}
if (b) {
diff --git a/src/test/java/rx/CompletableTest.java b/src/test/java/rx/CompletableTest.java
index a2a49d6c2d..4aee2eed85 100644
--- a/src/test/java/rx/CompletableTest.java
+++ b/src/test/java/rx/CompletableTest.java
@@ -4112,4 +4112,29 @@ public void onStart() {
ts.assertCompleted();
}
+ @Test
+ public void onErrorCompleteFunctionThrows() {
+ TestSubscriber<String> ts = new TestSubscriber<String>();
+
+ error.completable.onErrorComplete(new Func1<Throwable, Boolean>() {
+ @Override
+ public Boolean call(Throwable t) {
+ throw new TestException("Forced inner failure");
+ }
+ }).subscribe(ts);
+
+ ts.assertNoValues();
+ ts.assertNotCompleted();
+ ts.assertError(CompositeException.class);
+
+ CompositeException composite = (CompositeException)ts.getOnErrorEvents().get(0);
+
+ List<Throwable> errors = composite.getExceptions();
+ Assert.assertEquals(2, errors.size());
+
+ Assert.assertTrue(errors.get(0).toString(), errors.get(0) instanceof TestException);
+ Assert.assertEquals(errors.get(0).toString(), null, errors.get(0).getMessage());
+ Assert.assertTrue(errors.get(1).toString(), errors.get(1) instanceof TestException);
+ Assert.assertEquals(errors.get(1).toString(), "Forced inner failure", errors.get(1).getMessage());
+ }
}
\ No newline at end of file
|
886a96aed8bf0594e54197e870fc9d53c1282578
|
hbase
|
HBASE-9369 Add support for 1- and 2-byte integers- in OrderedBytes and provide types (He Liangliang)--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1524297 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/hbase
|
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java
new file mode 100644
index 000000000000..d353c15ba146
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.types;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Order;
+import org.apache.hadoop.hbase.util.OrderedBytes;
+import org.apache.hadoop.hbase.util.PositionedByteRange;
+
+
+/**
+ * A {@code short} of 16-bits using a fixed-length encoding. Built on
+ * {@link OrderedBytes#encodeInt16(PositionedByteRange, short, Order)}.
+ */
[email protected]
[email protected]
+public class OrderedInt16 extends OrderedBytesBase<Short> {
+
+ public static final OrderedInt16 ASCENDING = new OrderedInt16(Order.ASCENDING);
+ public static final OrderedInt16 DESCENDING = new OrderedInt16(Order.DESCENDING);
+
+ protected OrderedInt16(Order order) { super(order); }
+
+ @Override
+ public boolean isNullable() { return false; }
+
+ @Override
+ public int encodedLength(Short val) { return 3; }
+
+ @Override
+ public Class<Short> encodedClass() { return Short.class; }
+
+ @Override
+ public Short decode(PositionedByteRange src) {
+ return OrderedBytes.decodeInt16(src);
+ }
+
+ @Override
+ public int encode(PositionedByteRange dst, Short val) {
+ if (null == val) throw new IllegalArgumentException("Null values not supported.");
+ return OrderedBytes.encodeInt16(dst, val, order);
+ }
+
+ /**
+ * Read a {@code short} value from the buffer {@code src}.
+ */
+ public short decodeShort(PositionedByteRange src) {
+ return OrderedBytes.decodeInt16(src);
+ }
+
+ /**
+ * Write instance {@code val} into buffer {@code dst}.
+ */
+ public int encodeShort(PositionedByteRange dst, short val) {
+ return OrderedBytes.encodeInt16(dst, val, order);
+ }
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java
new file mode 100644
index 000000000000..29ed504de8fe
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.types;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Order;
+import org.apache.hadoop.hbase.util.OrderedBytes;
+import org.apache.hadoop.hbase.util.PositionedByteRange;
+
+
+/**
+ * A {@code byte} of 8-bits using a fixed-length encoding. Built on
+ * {@link OrderedBytes#encodeInt8(PositionedByteRange, byte, Order)}.
+ */
[email protected]
[email protected]
+public class OrderedInt8 extends OrderedBytesBase<Byte> {
+
+ public static final OrderedInt8 ASCENDING = new OrderedInt8(Order.ASCENDING);
+ public static final OrderedInt8 DESCENDING = new OrderedInt8(Order.DESCENDING);
+
+ protected OrderedInt8(Order order) { super(order); }
+
+ @Override
+ public boolean isNullable() { return false; }
+
+ @Override
+ public int encodedLength(Byte val) { return 2; }
+
+ @Override
+ public Class<Byte> encodedClass() { return Byte.class; }
+
+ @Override
+ public Byte decode(PositionedByteRange src) {
+ return OrderedBytes.decodeInt8(src);
+ }
+
+ @Override
+ public int encode(PositionedByteRange dst, Byte val) {
+ if (null == val) throw new IllegalArgumentException("Null values not supported.");
+ return OrderedBytes.encodeInt8(dst, val, order);
+ }
+
+ /**
+ * Read a {@code byte} value from the buffer {@code src}.
+ */
+ public byte decodeByte(PositionedByteRange src) {
+ return OrderedBytes.decodeInt8(src);
+ }
+
+ /**
+ * Write instance {@code val} into buffer {@code dst}.
+ */
+ public int encodeByte(PositionedByteRange dst, byte val) {
+ return OrderedBytes.encodeInt8(dst, val, order);
+ }
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java
new file mode 100644
index 000000000000..5091daa1d331
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.types;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Order;
+import org.apache.hadoop.hbase.util.PositionedByteRange;
+
+/**
+ * An {@code DataType} for interacting with values encoded using
+ * {@link Bytes#putByte(byte[], int, byte)}. Intended to make it easier to
+ * transition away from direct use of {@link Bytes}.
+ * @see Bytes#putByte(byte[], int, byte)
+ */
[email protected]
[email protected]
+public class RawByte implements DataType<Byte> {
+
+ @Override
+ public boolean isOrderPreserving() { return false; }
+
+ @Override
+ public Order getOrder() { return null; }
+
+ @Override
+ public boolean isNullable() { return false; }
+
+ @Override
+ public boolean isSkippable() { return true; }
+
+ @Override
+ public int encodedLength(Byte val) { return Bytes.SIZEOF_BYTE; }
+
+ @Override
+ public Class<Byte> encodedClass() { return Byte.class; }
+
+ @Override
+ public int skip(PositionedByteRange src) {
+ src.setPosition(src.getPosition() + Bytes.SIZEOF_BYTE);
+ return Bytes.SIZEOF_BYTE;
+ }
+
+ @Override
+ public Byte decode(PositionedByteRange src) {
+ byte val = src.getBytes()[src.getOffset() + src.getPosition()];
+ skip(src);
+ return val;
+ }
+
+ @Override
+ public int encode(PositionedByteRange dst, Byte val) {
+ Bytes.putByte(dst.getBytes(), dst.getOffset() + dst.getPosition(), val);
+ return skip(dst);
+ }
+
+ /**
+ * Read a {@code byte} value from the buffer {@code buff}.
+ */
+ public byte decodeByte(byte[] buff, int offset) {
+ return buff[offset];
+ }
+
+ /**
+ * Write instance {@code val} into buffer {@code buff}.
+ */
+ public int encodeByte(byte[] buff, int offset, byte val) {
+ return Bytes.putByte(buff, offset, val);
+ }
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java
new file mode 100644
index 000000000000..4fae5d74f185
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.types;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Order;
+import org.apache.hadoop.hbase.util.PositionedByteRange;
+
+/**
+ * An {@code DataType} for interacting with values encoded using
+ * {@link Bytes#putShort(byte[], int, short)}. Intended to make it easier to
+ * transition away from direct use of {@link Bytes}.
+ * @see Bytes#putShort(byte[], int, short)
+ * @see Bytes#toShort(byte[])
+ */
[email protected]
[email protected]
+public class RawShort implements DataType<Short> {
+
+ @Override
+ public boolean isOrderPreserving() { return false; }
+
+ @Override
+ public Order getOrder() { return null; }
+
+ @Override
+ public boolean isNullable() { return false; }
+
+ @Override
+ public boolean isSkippable() { return true; }
+
+ @Override
+ public int encodedLength(Short val) { return Bytes.SIZEOF_SHORT; }
+
+ @Override
+ public Class<Short> encodedClass() { return Short.class; }
+
+ @Override
+ public int skip(PositionedByteRange src) {
+ src.setPosition(src.getPosition() + Bytes.SIZEOF_SHORT);
+ return Bytes.SIZEOF_SHORT;
+ }
+
+ @Override
+ public Short decode(PositionedByteRange src) {
+ short val = Bytes.toShort(src.getBytes(), src.getOffset() + src.getPosition());
+ skip(src);
+ return val;
+ }
+
+ @Override
+ public int encode(PositionedByteRange dst, Short val) {
+ Bytes.putShort(dst.getBytes(), dst.getOffset() + dst.getPosition(), val);
+ return skip(dst);
+ }
+
+ /**
+ * Read a {@code short} value from the buffer {@code buff}.
+ */
+ public short decodeShort(byte[] buff, int offset) {
+ return Bytes.toShort(buff, offset);
+ }
+
+ /**
+ * Write instance {@code val} into buffer {@code buff}.
+ */
+ public int encodeShort(byte[] buff, int offset, short val) {
+ return Bytes.putShort(buff, offset, val);
+ }
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
index b99fc7b99169..0bc20c8614c0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
@@ -278,6 +278,7 @@ public class OrderedBytes {
* implementations can be inserted into the total ordering enforced here.
*/
private static final byte NULL = 0x05;
+ // room for 1 expansion type
private static final byte NEG_INF = 0x07;
private static final byte NEG_LARGE = 0x08;
private static final byte NEG_MED_MIN = 0x09;
@@ -289,14 +290,21 @@ public class OrderedBytes {
private static final byte POS_MED_MAX = 0x21;
private static final byte POS_LARGE = 0x22;
private static final byte POS_INF = 0x23;
- private static final byte NAN = 0x25;
- private static final byte FIXED_INT32 = 0x27;
- private static final byte FIXED_INT64 = 0x28;
+ // room for 2 expansion type
+ private static final byte NAN = 0x26;
+ // room for 2 expansion types
+ private static final byte FIXED_INT8 = 0x29;
+ private static final byte FIXED_INT16 = 0x2a;
+ private static final byte FIXED_INT32 = 0x2b;
+ private static final byte FIXED_INT64 = 0x2c;
+ // room for 3 expansion types
private static final byte FIXED_FLOAT32 = 0x30;
private static final byte FIXED_FLOAT64 = 0x31;
- private static final byte TEXT = 0x33;
- private static final byte BLOB_VAR = 0x35;
- private static final byte BLOB_COPY = 0x36;
+ // room for 2 expansion type
+ private static final byte TEXT = 0x34;
+ // room for 2 expansion type
+ private static final byte BLOB_VAR = 0x37;
+ private static final byte BLOB_COPY = 0x38;
/*
* The following constant values are used by encoding implementations
@@ -1198,6 +1206,59 @@ public static int encodeNull(PositionedByteRange dst, Order ord) {
return 1;
}
+ /**
+ * Encode an {@code int8} value using the fixed-length encoding.
+ * @return the number of bytes written.
+ * @see #encodeInt64(PositionedByteRange, long, Order)
+ * @see #decodeInt8(PositionedByteRange)
+ */
+ public static int encodeInt8(PositionedByteRange dst, byte val, Order ord) {
+ final int offset = dst.getOffset(), start = dst.getPosition();
+ dst.put(FIXED_INT8)
+ .put((byte) (val ^ 0x80));
+ ord.apply(dst.getBytes(), offset + start, 2);
+ return 2;
+ }
+
+ /**
+ * Decode an {@code int8} value.
+ * @see #encodeInt8(PositionedByteRange, byte, Order)
+ */
+ public static byte decodeInt8(PositionedByteRange src) {
+ final byte header = src.get();
+ assert header == FIXED_INT8 || header == DESCENDING.apply(FIXED_INT8);
+ Order ord = header == FIXED_INT8 ? ASCENDING : DESCENDING;
+ return (byte)((ord.apply(src.get()) ^ 0x80) & 0xff);
+ }
+
+ /**
+ * Encode an {@code int16} value using the fixed-length encoding.
+ * @return the number of bytes written.
+ * @see #encodeInt64(PositionedByteRange, long, Order)
+ * @see #decodeInt16(PositionedByteRange)
+ */
+ public static int encodeInt16(PositionedByteRange dst, short val, Order ord) {
+ final int offset = dst.getOffset(), start = dst.getPosition();
+ dst.put(FIXED_INT16)
+ .put((byte) ((val >> 8) ^ 0x80))
+ .put((byte) val);
+ ord.apply(dst.getBytes(), offset + start, 3);
+ return 3;
+ }
+
+ /**
+ * Decode an {@code int16} value.
+ * @see #encodeInt16(PositionedByteRange, short, Order)
+ */
+ public static short decodeInt16(PositionedByteRange src) {
+ final byte header = src.get();
+ assert header == FIXED_INT16 || header == DESCENDING.apply(FIXED_INT16);
+ Order ord = header == FIXED_INT16 ? ASCENDING : DESCENDING;
+ short val = (short) ((ord.apply(src.get()) ^ 0x80) & 0xff);
+ val = (short) ((val << 8) + (ord.apply(src.get()) & 0xff));
+ return val;
+ }
+
/**
* Encode an {@code int32} value using the fixed-length encoding.
* @return the number of bytes written.
@@ -1270,14 +1331,14 @@ public static int decodeInt32(PositionedByteRange src) {
public static int encodeInt64(PositionedByteRange dst, long val, Order ord) {
final int offset = dst.getOffset(), start = dst.getPosition();
dst.put(FIXED_INT64)
- .put((byte) ((val >> 56) ^ 0x80))
- .put((byte) (val >> 48))
- .put((byte) (val >> 40))
- .put((byte) (val >> 32))
- .put((byte) (val >> 24))
- .put((byte) (val >> 16))
- .put((byte) (val >> 8))
- .put((byte) val);
+ .put((byte) ((val >> 56) ^ 0x80))
+ .put((byte) (val >> 48))
+ .put((byte) (val >> 40))
+ .put((byte) (val >> 32))
+ .put((byte) (val >> 24))
+ .put((byte) (val >> 16))
+ .put((byte) (val >> 8))
+ .put((byte) val);
ord.apply(dst.getBytes(), offset + start, 9);
return 9;
}
@@ -1611,6 +1672,12 @@ public static int skip(PositionedByteRange src) {
return 1;
case NAN:
return 1;
+ case FIXED_INT8:
+ src.setPosition(src.getPosition() + 1);
+ return src.getPosition() - start;
+ case FIXED_INT16:
+ src.setPosition(src.getPosition() + 2);
+ return src.getPosition() - start;
case FIXED_INT32:
src.setPosition(src.getPosition() + 4);
return src.getPosition() - start;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
index 509d76e666fd..5ecc45461da9 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java
@@ -358,6 +358,142 @@ public void testNumericIntRealCompatibility() {
}
}
+ /**
+ * Test int8 encoding.
+ */
+ @Test
+ public void testInt8() {
+ Byte[] vals =
+ { Byte.MIN_VALUE, Byte.MIN_VALUE / 2, 0, Byte.MAX_VALUE / 2, Byte.MAX_VALUE };
+
+ /*
+ * assert encoded values match decoded values. encode into target buffer
+ * starting at an offset to detect over/underflow conditions.
+ */
+ for (Order ord : new Order[] { Order.ASCENDING, Order.DESCENDING }) {
+ for (int i = 0; i < vals.length; i++) {
+ // allocate a buffer 3-bytes larger than necessary to detect over/underflow
+ byte[] a = new byte[2 + 3];
+ PositionedByteRange buf1 = new SimplePositionedByteRange(a, 1, 2 + 1);
+ buf1.setPosition(1);
+
+ // verify encode
+ assertEquals("Surprising return value.",
+ 2, OrderedBytes.encodeInt8(buf1, vals[i], ord));
+ assertEquals("Broken test: serialization did not consume entire buffer.",
+ buf1.getLength(), buf1.getPosition());
+ assertEquals("Surprising serialized length.", 2, buf1.getPosition() - 1);
+ assertEquals("Buffer underflow.", 0, a[0]);
+ assertEquals("Buffer underflow.", 0, a[1]);
+ assertEquals("Buffer overflow.", 0, a[a.length - 1]);
+
+ // verify skip
+ buf1.setPosition(1);
+ assertEquals("Surprising return value.", 2, OrderedBytes.skip(buf1));
+ assertEquals("Did not skip enough bytes.", 2, buf1.getPosition() - 1);
+
+ // verify decode
+ buf1.setPosition(1);
+ assertEquals("Deserialization failed.",
+ vals[i].byteValue(), OrderedBytes.decodeInt8(buf1));
+ assertEquals("Did not consume enough bytes.", 2, buf1.getPosition() - 1);
+ }
+ }
+
+ /*
+ * assert natural sort order is preserved by the codec.
+ */
+ for (Order ord : new Order[] { Order.ASCENDING, Order.DESCENDING }) {
+ byte[][] encoded = new byte[vals.length][2];
+ PositionedByteRange pbr = new SimplePositionedByteRange();
+ for (int i = 0; i < vals.length; i++) {
+ OrderedBytes.encodeInt8(pbr.set(encoded[i]), vals[i], ord);
+ }
+
+ Arrays.sort(encoded, Bytes.BYTES_COMPARATOR);
+ Byte[] sortedVals = Arrays.copyOf(vals, vals.length);
+ if (ord == Order.ASCENDING) Arrays.sort(sortedVals);
+ else Arrays.sort(sortedVals, Collections.reverseOrder());
+
+ for (int i = 0; i < sortedVals.length; i++) {
+ int decoded = OrderedBytes.decodeInt8(pbr.set(encoded[i]));
+ assertEquals(
+ String.format(
+ "Encoded representations do not preserve natural order: <%s>, <%s>, %s",
+ sortedVals[i], decoded, ord),
+ sortedVals[i].byteValue(), decoded);
+ }
+ }
+ }
+
+ /**
+ * Test int16 encoding.
+ */
+ @Test
+ public void testInt16() {
+ Short[] vals =
+ { Short.MIN_VALUE, Short.MIN_VALUE / 2, 0, Short.MAX_VALUE / 2, Short.MAX_VALUE };
+
+ /*
+ * assert encoded values match decoded values. encode into target buffer
+ * starting at an offset to detect over/underflow conditions.
+ */
+ for (Order ord : new Order[] { Order.ASCENDING, Order.DESCENDING }) {
+ for (int i = 0; i < vals.length; i++) {
+ // allocate a buffer 3-bytes larger than necessary to detect over/underflow
+ byte[] a = new byte[3 + 3];
+ PositionedByteRange buf1 = new SimplePositionedByteRange(a, 1, 3 + 1);
+ buf1.setPosition(1);
+
+ // verify encode
+ assertEquals("Surprising return value.",
+ 3, OrderedBytes.encodeInt16(buf1, vals[i], ord));
+ assertEquals("Broken test: serialization did not consume entire buffer.",
+ buf1.getLength(), buf1.getPosition());
+ assertEquals("Surprising serialized length.", 3, buf1.getPosition() - 1);
+ assertEquals("Buffer underflow.", 0, a[0]);
+ assertEquals("Buffer underflow.", 0, a[1]);
+ assertEquals("Buffer overflow.", 0, a[a.length - 1]);
+
+ // verify skip
+ buf1.setPosition(1);
+ assertEquals("Surprising return value.", 3, OrderedBytes.skip(buf1));
+ assertEquals("Did not skip enough bytes.", 3, buf1.getPosition() - 1);
+
+ // verify decode
+ buf1.setPosition(1);
+ assertEquals("Deserialization failed.",
+ vals[i].shortValue(), OrderedBytes.decodeInt16(buf1));
+ assertEquals("Did not consume enough bytes.", 3, buf1.getPosition() - 1);
+ }
+ }
+
+ /*
+ * assert natural sort order is preserved by the codec.
+ */
+ for (Order ord : new Order[] { Order.ASCENDING, Order.DESCENDING }) {
+ byte[][] encoded = new byte[vals.length][3];
+ PositionedByteRange pbr = new SimplePositionedByteRange();
+ for (int i = 0; i < vals.length; i++) {
+ OrderedBytes.encodeInt16(pbr.set(encoded[i]), vals[i], ord);
+ }
+
+ Arrays.sort(encoded, Bytes.BYTES_COMPARATOR);
+ Short[] sortedVals = Arrays.copyOf(vals, vals.length);
+ if (ord == Order.ASCENDING) Arrays.sort(sortedVals);
+ else Arrays.sort(sortedVals, Collections.reverseOrder());
+
+ for (int i = 0; i < sortedVals.length; i++) {
+ int decoded = OrderedBytes.decodeInt16(pbr.set(encoded[i]));
+ assertEquals(
+ String.format(
+ "Encoded representations do not preserve natural order: <%s>, <%s>, %s",
+ sortedVals[i], decoded, ord),
+ sortedVals[i].shortValue(), decoded);
+ }
+ }
+ }
+
/**
* Test int32 encoding.
*/
@@ -898,7 +1034,8 @@ public void testBlobCopy() {
@Test(expected = IllegalArgumentException.class)
public void testBlobCopyNoZeroBytes() {
byte[] val = { 0x01, 0x02, 0x00, 0x03 };
- byte[] ascExpected = { 0x36, 0x01, 0x02, 0x00, 0x03 };
+ // TODO: implementation detail leaked here.
+ byte[] ascExpected = { 0x38, 0x01, 0x02, 0x00, 0x03 };
PositionedByteRange buf = new SimplePositionedByteRange(val.length + 1);
OrderedBytes.encodeBlobCopy(buf, val, Order.ASCENDING);
assertArrayEquals(ascExpected, buf.getBytes());
@@ -923,6 +1060,8 @@ public void testSkip() {
BigDecimal posLarge = negLarge.negate();
double posInf = Double.POSITIVE_INFINITY;
double nan = Double.NaN;
+ byte int8 = 100;
+ short int16 = 100;
int int32 = 100;
long int64 = 100l;
float float32 = 100.0f;
@@ -988,6 +1127,16 @@ public void testSkip() {
buff.setPosition(0);
assertEquals(o, OrderedBytes.skip(buff));
+ buff.setPosition(0);
+ o = OrderedBytes.encodeInt8(buff, int8, ord);
+ buff.setPosition(0);
+ assertEquals(o, OrderedBytes.skip(buff));
+
+ buff.setPosition(0);
+ o = OrderedBytes.encodeInt16(buff, int16, ord);
+ buff.setPosition(0);
+ assertEquals(o, OrderedBytes.skip(buff));
+
buff.setPosition(0);
o = OrderedBytes.encodeInt32(buff, int32, ord);
buff.setPosition(0);
|
45e4ec9000f8fbca1a5f04c68c1b516388f02a8f
|
hbase
|
HBASE-10066. Use ByteArrayOutputStream-writeTo- where appropriate--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1547294 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hbase
|
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 46d145eb9c55..2f38b3edf708 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -1752,9 +1752,9 @@ public static long getTotalRequestsCount(RegionLoad rl) {
public static byte [] toDelimitedByteArray(final Message m) throws IOException {
// Allocate arbitrary big size so we avoid resizing.
ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
+ baos.write(PB_MAGIC);
m.writeDelimitedTo(baos);
- baos.close();
- return ProtobufUtil.prependPBMagic(baos.toByteArray());
+ return baos.toByteArray();
}
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index 6c72968d86cf..2d3e11a21ee5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -180,7 +180,7 @@ void serialize(DataOutputStream outputStream) throws IOException {
// The last 4 bytes of the file encode the major and minor version universally
baosDos.writeInt(materializeVersion(majorVersion, minorVersion));
- outputStream.write(baos.toByteArray());
+ baos.writeTo(outputStream);
}
/**
@@ -208,8 +208,10 @@ void serializeAsPB(DataOutputStream output) throws IOException {
if (encryptionKey != null) {
builder.setEncryptionKey(ZeroCopyLiteralByteString.wrap(encryptionKey));
}
+ // We need this extra copy unfortunately to determine the final size of the
+ // delimited output, see use of baos.size() below.
builder.build().writeDelimitedTo(baos);
- output.write(baos.toByteArray());
+ baos.writeTo(output);
// Pad to make up the difference between variable PB encoding length and the
// length when encoded as writable under earlier V2 formats. Failure to pad
// properly or if the PB encoding is too big would mean the trailer wont be read
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java
index 6fecdfc4bb85..6f2377076561 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hbase.io.hfile;
-import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
@@ -216,15 +215,11 @@ protected void finishClose(FixedFileTrailer trailer) throws IOException {
if (cryptoContext != Encryption.Context.NONE) {
// Wrap the context's key and write it as the encryption metadata, the wrapper includes
// all information needed for decryption
- ByteArrayOutputStream os = new ByteArrayOutputStream();
- os.write(EncryptionUtil.wrapKey(cryptoContext.getConf(),
+ trailer.setEncryptionKey(EncryptionUtil.wrapKey(cryptoContext.getConf(),
cryptoContext.getConf().get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY,
User.getCurrent().getShortName()),
cryptoContext.getKey()));
- os.close();
- trailer.setEncryptionKey(os.toByteArray());
}
-
// Now we can finish the close
super.finishClose(trailer);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java
index d53693d43cbe..35b7193f2044 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java
@@ -194,9 +194,8 @@ public void write(Cell cell) throws IOException {
cout.write(kvBuffer, pos, remainingLength);
cout.close();
- byte[] codedBytes = baos.toByteArray();
- StreamUtils.writeRawVInt32(out, codedBytes.length);
- out.write(codedBytes);
+ StreamUtils.writeRawVInt32(out, baos.size());
+ baos.writeTo(out);
}
}
|
eb1027b5e8c047059f68e7547188d08c7fde0b6f
|
intellij-community
|
fixed PY-12251 Project Interpreters: Create- virtualenv from settings doesn't update warnigns about python package- management tools--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/python/ide/src/com/jetbrains/python/configuration/PyActiveSdkConfigurable.java b/python/ide/src/com/jetbrains/python/configuration/PyActiveSdkConfigurable.java
index 4a193b3cccd9d..8c3cd3268fefe 100644
--- a/python/ide/src/com/jetbrains/python/configuration/PyActiveSdkConfigurable.java
+++ b/python/ide/src/com/jetbrains/python/configuration/PyActiveSdkConfigurable.java
@@ -99,8 +99,7 @@ private void initContent() {
public void actionPerformed(ActionEvent e) {
final Sdk selectedSdk = (Sdk)mySdkCombo.getSelectedItem();
myPackagesPanel.updatePackages(selectedSdk != null ? new PyPackageManagementService(myProject, selectedSdk) : null);
- if (selectedSdk != null)
- myPackagesPanel.updateNotifications(selectedSdk);
+ myPackagesPanel.updateNotifications(selectedSdk);
}
});
myDetailsCallback = new NullableConsumer<Sdk>() {
@@ -148,6 +147,7 @@ public void consume(Sdk sdk) {
updateSdkList(false);
mySdkCombo.getModel().setSelectedItem(sdk);
myPackagesPanel.updatePackages(new PyPackageManagementService(myProject, sdk));
+ myPackagesPanel.updateNotifications(sdk);
}
}
);
diff --git a/python/src/com/jetbrains/python/packaging/ui/PyInstalledPackagesPanel.java b/python/src/com/jetbrains/python/packaging/ui/PyInstalledPackagesPanel.java
index a9e0ca211171b..0fdbc53a55860 100644
--- a/python/src/com/jetbrains/python/packaging/ui/PyInstalledPackagesPanel.java
+++ b/python/src/com/jetbrains/python/packaging/ui/PyInstalledPackagesPanel.java
@@ -33,6 +33,7 @@
import com.jetbrains.python.sdk.flavors.IronPythonSdkFlavor;
import com.jetbrains.python.sdk.flavors.PythonSdkFlavor;
import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
import java.util.List;
import java.util.Set;
@@ -75,7 +76,11 @@ private Sdk getSelectedSdk() {
return service != null ? service.getSdk() : null;
}
- public void updateNotifications(@NotNull final Sdk selectedSdk) {
+ public void updateNotifications(@Nullable final Sdk selectedSdk) {
+ if (selectedSdk == null) {
+ myNotificationArea.hide();
+ return;
+ }
final Application application = ApplicationManager.getApplication();
application.executeOnPooledThread(new Runnable() {
@Override
|
60eee421aac8d7ebbe44070979da88eb4e7371d7
|
hbase
|
HBASE-2156 HBASE-2037 broke Scan--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@902213 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 3e52e955d72d..66855c92e243 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -188,6 +188,7 @@ Release 0.21.0 - Unreleased
HBASE-2154 Fix Client#next(int) javadoc
HBASE-2152 Add default jmxremote.{access|password} files into conf
(Lars George and Gary Helmling via Stack)
+ HBASE-2156 HBASE-2037 broke Scan - only a test for trunk
IMPROVEMENTS
HBASE-1760 Cleanup TODOs in HTable
diff --git a/src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java b/src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java
index d5b97eca90cf..5227a050129a 100644
--- a/src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -3477,4 +3477,21 @@ public void testGetClosestRowBefore() throws IOException {
assertTrue(result.containsColumn(HConstants.CATALOG_FAMILY, null));
assertTrue(Bytes.equals(result.getValue(HConstants.CATALOG_FAMILY, null), one));
}
+
+ /**
+ * For HBASE-2156
+ * @throws Exception
+ */
+ public void testScanVariableReuse() throws Exception {
+ Scan scan = new Scan();
+ scan.addFamily(FAMILY);
+ scan.addColumn(FAMILY, ROW);
+
+ assertTrue(scan.getFamilyMap().get(FAMILY).size() == 1);
+
+ scan = new Scan();
+ scan.addFamily(FAMILY);
+
+ assertTrue(scan.getFamilyMap().get(FAMILY).size() == 0);
+ }
}
\ No newline at end of file
|
4997182d07c7b7efdcd17b8deccf73fd23a3671b
|
camel
|
CAMEL-3240 Fixed camel-core build error.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1058911 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java b/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
index d6068c338e057..90ea447bfa569 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
@@ -740,7 +740,8 @@ public synchronized boolean stopRoute(String routeId, long timeout, TimeUnit tim
stopRouteService(routeService);
}
return completed;
- }
+ }
+ return false;
}
public synchronized void stopRoute(String routeId) throws Exception {
|
cfdb09b7cbb5ea1732416da7ce45c78ac4c0849b
|
hadoop
|
YARN-578. Fixed NM to use SecureIOUtils for reading- and aggregating logs. Contributed by Omkar Vinit Joshi. svn merge- --ignore-ancestry -c 1487672 ../../trunk/--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1487686 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ba3068ef40db6..5bd00122d74be 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -348,6 +348,9 @@ Release 2.0.5-beta - UNRELEASED
YARN-715. Fixed unit test failures - TestDistributedShell and
TestUnmanagedAMLauncher. (Vinod Kumar Vavilapalli via sseth)
+ YARN-578. Fixed NM to use SecureIOUtils for reading and aggregating logs.
+ (Omkar Vinit Joshi via vinodkv)
+
BREAKDOWN OF HADOOP-8562 SUBTASKS AND RELATED JIRAS
YARN-158. Yarn creating package-info.java must not depend on sh.
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
index c519f1795957e..185020dc4a3ce 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
@@ -25,8 +25,8 @@
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
-import java.io.InputStreamReader;
import java.io.IOException;
+import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.Writer;
import java.security.PrivilegedExceptionAction;
@@ -50,6 +50,7 @@
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.SecureIOUtils;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.file.tfile.TFile;
import org.apache.hadoop.security.UserGroupInformation;
@@ -137,12 +138,15 @@ public static class LogValue {
private final List<String> rootLogDirs;
private final ContainerId containerId;
+ private final String user;
// TODO Maybe add a version string here. Instead of changing the version of
// the entire k-v format
- public LogValue(List<String> rootLogDirs, ContainerId containerId) {
+ public LogValue(List<String> rootLogDirs, ContainerId containerId,
+ String user) {
this.rootLogDirs = new ArrayList<String>(rootLogDirs);
this.containerId = containerId;
+ this.user = user;
// Ensure logs are processed in lexical order
Collections.sort(this.rootLogDirs);
@@ -177,18 +181,30 @@ public void write(DataOutputStream out) throws IOException {
// Write the log itself
FileInputStream in = null;
try {
- in = new FileInputStream(logFile);
+ in = SecureIOUtils.openForRead(logFile, getUser(), null);
byte[] buf = new byte[65535];
int len = 0;
while ((len = in.read(buf)) != -1) {
out.write(buf, 0, len);
}
+ } catch (IOException e) {
+ String message = "Error aggregating log file. Log file : "
+ + logFile.getAbsolutePath() + e.getMessage();
+ LOG.error(message, e);
+ out.write(message.getBytes());
} finally {
- in.close();
+ if (in != null) {
+ in.close();
+ }
}
}
}
}
+
+ // Added for testing purpose.
+ public String getUser() {
+ return user;
+ }
}
public static class LogWriter {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
index de755a721564e..248ec3145bd75 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
@@ -18,13 +18,21 @@
package org.apache.hadoop.yarn.logaggregation;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
+import java.io.FileNotFoundException;
import java.io.FileOutputStream;
+import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.StringWriter;
+import java.io.UnsupportedEncodingException;
import java.io.Writer;
+import java.util.Arrays;
import java.util.Collections;
import junit.framework.Assert;
@@ -32,11 +40,14 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader;
@@ -44,6 +55,7 @@
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.After;
+import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
@@ -97,7 +109,7 @@ public void testReadAcontainerLogs1() throws Exception {
LogKey logKey = new LogKey(testContainerId);
LogValue logValue =
new LogValue(Collections.singletonList(srcFileRoot.toString()),
- testContainerId);
+ testContainerId, ugi.getShortUserName());
logWriter.append(logKey, logValue);
logWriter.closeWriter();
@@ -131,9 +143,115 @@ public void testReadAcontainerLogs1() throws Exception {
Assert.assertEquals(expectedLength, s.length());
}
+ @Test(timeout=10000)
+ public void testContainerLogsFileAccess() throws IOException {
+ // This test will run only if NativeIO is enabled as SecureIOUtils
+ // require it to be enabled.
+ Assume.assumeTrue(NativeIO.isAvailable());
+ Configuration conf = new Configuration();
+ conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
+ "kerberos");
+ UserGroupInformation.setConfiguration(conf);
+ File workDir = new File(testWorkDir, "testContainerLogsFileAccess1");
+ Path remoteAppLogFile =
+ new Path(workDir.getAbsolutePath(), "aggregatedLogFile");
+ Path srcFileRoot = new Path(workDir.getAbsolutePath(), "srcFiles");
+
+ String data = "Log File content for container : ";
+ // Creating files for container1. Log aggregator will try to read log files
+ // with illegal user.
+ ContainerId testContainerId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
+ Path appDir =
+ new Path(srcFileRoot, testContainerId1.getApplicationAttemptId()
+ .getApplicationId().toString());
+ Path srcFilePath1 = new Path(appDir, testContainerId1.toString());
+ String stdout = "stdout";
+ String stderr = "stderr";
+ writeSrcFile(srcFilePath1, stdout, data + testContainerId1.toString()
+ + stdout);
+ writeSrcFile(srcFilePath1, stderr, data + testContainerId1.toString()
+ + stderr);
+
+ UserGroupInformation ugi =
+ UserGroupInformation.getCurrentUser();
+ LogWriter logWriter = new LogWriter(conf, remoteAppLogFile, ugi);
+
+ LogKey logKey = new LogKey(testContainerId1);
+ String randomUser = "randomUser";
+ LogValue logValue =
+ spy(new LogValue(Collections.singletonList(srcFileRoot.toString()),
+ testContainerId1, randomUser));
+
+ // It is trying simulate a situation where first log file is owned by
+ // different user (probably symlink) and second one by the user itself.
+ when(logValue.getUser()).thenReturn(randomUser).thenReturn(
+ ugi.getShortUserName());
+ logWriter.append(logKey, logValue);
+
+ logWriter.closeWriter();
+
+ BufferedReader in =
+ new BufferedReader(new FileReader(new File(remoteAppLogFile
+ .toUri().getRawPath())));
+ String line;
+ StringBuffer sb = new StringBuffer("");
+ while ((line = in.readLine()) != null) {
+ LOG.info(line);
+ sb.append(line);
+ }
+ line = sb.toString();
+
+ String stdoutFile1 =
+ StringUtils.join(
+ Path.SEPARATOR,
+ Arrays.asList(new String[] {
+ srcFileRoot.toUri().toString(),
+ testContainerId1.getApplicationAttemptId().getApplicationId()
+ .toString(), testContainerId1.toString(), stderr }));
+ String message1 =
+ "Owner '" + ugi.getShortUserName() + "' for path " + stdoutFile1
+ + " did not match expected owner '" + randomUser + "'";
+
+ String stdoutFile2 =
+ StringUtils.join(
+ Path.SEPARATOR,
+ Arrays.asList(new String[] {
+ srcFileRoot.toUri().toString(),
+ testContainerId1.getApplicationAttemptId().getApplicationId()
+ .toString(), testContainerId1.toString(), stdout }));
+ String message2 =
+ "Owner '" + ugi.getShortUserName() + "' for path "
+ + stdoutFile2 + " did not match expected owner '"
+ + ugi.getShortUserName() + "'";
+
+ Assert.assertTrue(line.contains(message1));
+ Assert.assertFalse(line.contains(message2));
+ Assert.assertFalse(line.contains(data + testContainerId1.toString()
+ + stderr));
+ Assert.assertTrue(line.contains(data + testContainerId1.toString()
+ + stdout));
+ }
private void writeSrcFile(Path srcFilePath, String fileName, long length)
throws IOException {
+ OutputStreamWriter osw = getOutputStreamWriter(srcFilePath, fileName);
+ int ch = filler;
+ for (int i = 0; i < length; i++) {
+ osw.write(ch);
+ }
+ osw.close();
+ }
+
+ private void writeSrcFile(Path srcFilePath, String fileName, String data)
+ throws IOException {
+ OutputStreamWriter osw = getOutputStreamWriter(srcFilePath, fileName);
+ osw.write(data);
+ osw.close();
+ }
+
+ private OutputStreamWriter getOutputStreamWriter(Path srcFilePath,
+ String fileName) throws IOException, FileNotFoundException,
+ UnsupportedEncodingException {
File dir = new File(srcFilePath.toString());
if (!dir.exists()) {
if (!dir.mkdirs()) {
@@ -143,10 +261,6 @@ private void writeSrcFile(Path srcFilePath, String fileName, long length)
File outputFile = new File(new File(srcFilePath.toString()), fileName);
FileOutputStream os = new FileOutputStream(outputFile);
OutputStreamWriter osw = new OutputStreamWriter(os, "UTF8");
- int ch = filler;
- for (int i = 0; i < length; i++) {
- osw.write(ch);
- }
- osw.close();
+ return osw;
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/resources/krb5.conf b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/resources/krb5.conf
new file mode 100644
index 0000000000000..121ac6d9b981a
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/resources/krb5.conf
@@ -0,0 +1,28 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+[libdefaults]
+ default_realm = APACHE.ORG
+ udp_preference_limit = 1
+ extra_addresses = 127.0.0.1
+[realms]
+ APACHE.ORG = {
+ admin_server = localhost:88
+ kdc = localhost:88
+ }
+[domain_realm]
+ localhost = APACHE.ORG
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
index f9a0558563df4..6ef794442c3e1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
@@ -123,7 +123,9 @@ private void uploadLogsForContainer(ContainerId containerId) {
+ ". Current good log dirs are "
+ StringUtils.join(",", dirsHandler.getLogDirs()));
LogKey logKey = new LogKey(containerId);
- LogValue logValue = new LogValue(dirsHandler.getLogDirs(), containerId);
+ LogValue logValue =
+ new LogValue(dirsHandler.getLogDirs(), containerId,
+ userUgi.getShortUserName());
try {
this.writer.append(logKey, logValue);
} catch (IOException e) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
index 5fdd9577d0fb0..452a8237cb85e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
@@ -39,8 +39,8 @@
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SecureIOUtils;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -52,8 +52,8 @@
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.PRE;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -228,6 +228,27 @@ private void printLogs(Block html, ContainerId containerId,
return;
} else {
FileInputStream logByteStream = null;
+
+ try {
+ logByteStream =
+ SecureIOUtils.openForRead(logFile, application.getUser(), null);
+ } catch (IOException e) {
+ LOG.error(
+ "Exception reading log file " + logFile.getAbsolutePath(), e);
+ if (e.getMessage().contains(
+ "did not match expected owner '" + application.getUser()
+ + "'")) {
+ html.h1("Exception reading log file. Application submitted by '"
+ + application.getUser()
+ + "' doesn't own requested log file : "
+ + logFile.getName());
+ } else {
+ html.h1("Exception reading log file. It might be because log "
+ + "file was aggregated : " + logFile.getName());
+ }
+ return;
+ }
+
try {
long toRead = end - start;
if (toRead < logFile.length()) {
@@ -236,11 +257,8 @@ private void printLogs(Block html, ContainerId containerId,
logFile.getName(), "?start=0"), "here").
_(" for full log")._();
}
- // TODO: Use secure IO Utils to avoid symlink attacks.
// TODO Fix findBugs close warning along with IOUtils change
- logByteStream = new FileInputStream(logFile);
IOUtils.skipFully(logByteStream, start);
-
InputStreamReader reader = new InputStreamReader(logByteStream);
int bufferSize = 65536;
char[] cbuf = new char[bufferSize];
@@ -260,8 +278,10 @@ private void printLogs(Block html, ContainerId containerId,
reader.close();
} catch (IOException e) {
- html.h1("Exception reading log-file. Log file was likely aggregated. "
- + StringUtils.stringifyException(e));
+ LOG.error(
+ "Exception reading log file " + logFile.getAbsolutePath(), e);
+ html.h1("Exception reading log file. It might be because log "
+ + "file was aggregated : " + logFile.getName());
} finally {
if (logByteStream != null) {
try {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
index 43a5401ab4256..3fa594a889552 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation;
+import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertTrue;
@@ -126,6 +127,7 @@ public void tearDown() throws IOException, InterruptedException {
@SuppressWarnings("unchecked")
public void testLocalFileDeletionAfterUpload() throws Exception {
this.delSrvc = new DeletionService(createContainerExecutor());
+ delSrvc = spy(delSrvc);
this.delSrvc.init(conf);
this.conf.set(YarnConfiguration.NM_LOG_DIRS, localLogDir.getAbsolutePath());
this.conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
@@ -169,7 +171,8 @@ public void testLocalFileDeletionAfterUpload() throws Exception {
// ensure filesystems were closed
verify(logAggregationService).closeFileSystems(
any(UserGroupInformation.class));
-
+ verify(delSrvc).delete(eq(user), eq((Path) null),
+ eq(new Path(app1LogDir.getAbsolutePath())));
delSrvc.stop();
String containerIdStr = ConverterUtils.toString(container11);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestContainerLogsPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestContainerLogsPage.java
index 459493959ade7..76be0a2342303 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestContainerLogsPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestContainerLogsPage.java
@@ -18,27 +18,48 @@
package org.apache.hadoop.yarn.server.nodemanager.webapp;
+import static org.junit.Assume.assumeTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.verify;
+import java.io.BufferedOutputStream;
import java.io.File;
+import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.io.nativeio.NativeIO;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.ContainerLogsPage.ContainersLogsBlock;
+import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.BuilderUtils;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.webapp.test.WebAppTests;
import org.junit.Assert;
import org.junit.Test;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+
public class TestContainerLogsPage {
@Test(timeout=30000)
@@ -69,4 +90,99 @@ public void testContainerLogDirs() throws IOException {
container1, dirsHandler);
Assert.assertTrue(!(files.get(0).toString().contains("file:")));
}
+
+ @Test(timeout = 10000)
+ public void testContainerLogPageAccess() throws IOException {
+ // SecureIOUtils require Native IO to be enabled. This test will run
+ // only if it is enabled.
+ assumeTrue(NativeIO.isAvailable());
+ String user = "randomUser" + System.currentTimeMillis();
+ File absLogDir = null, appDir = null, containerDir = null, syslog = null;
+ try {
+ // target log directory
+ absLogDir =
+ new File("target", TestContainerLogsPage.class.getSimpleName()
+ + "LogDir").getAbsoluteFile();
+ absLogDir.mkdir();
+
+ Configuration conf = new Configuration();
+ conf.set(YarnConfiguration.NM_LOG_DIRS, absLogDir.toURI().toString());
+ conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
+ "kerberos");
+ UserGroupInformation.setConfiguration(conf);
+
+ NodeHealthCheckerService healthChecker = new NodeHealthCheckerService();
+ healthChecker.init(conf);
+ LocalDirsHandlerService dirsHandler = healthChecker.getDiskHandler();
+ // Add an application and the corresponding containers
+ RecordFactory recordFactory =
+ RecordFactoryProvider.getRecordFactory(conf);
+ long clusterTimeStamp = 1234;
+ ApplicationId appId =
+ BuilderUtils.newApplicationId(recordFactory, clusterTimeStamp, 1);
+ Application app = mock(Application.class);
+ when(app.getAppId()).thenReturn(appId);
+
+ // Making sure that application returns a random user. This is required
+ // for SecureIOUtils' file owner check.
+ when(app.getUser()).thenReturn(user);
+
+ ApplicationAttemptId appAttemptId =
+ BuilderUtils.newApplicationAttemptId(appId, 1);
+ ContainerId container1 =
+ BuilderUtils.newContainerId(recordFactory, appId, appAttemptId, 0);
+
+ // Testing secure read access for log files
+
+ // Creating application and container directory and syslog file.
+ appDir = new File(absLogDir, appId.toString());
+ appDir.mkdir();
+ containerDir = new File(appDir, container1.toString());
+ containerDir.mkdir();
+ syslog = new File(containerDir, "syslog");
+ syslog.createNewFile();
+ BufferedOutputStream out =
+ new BufferedOutputStream(new FileOutputStream(syslog));
+ out.write("Log file Content".getBytes());
+ out.close();
+
+ ApplicationACLsManager aclsManager = mock(ApplicationACLsManager.class);
+
+ Context context = mock(Context.class);
+ ConcurrentMap<ApplicationId, Application> appMap =
+ new ConcurrentHashMap<ApplicationId, Application>();
+ appMap.put(appId, app);
+ when(context.getApplications()).thenReturn(appMap);
+ when(context.getContainers()).thenReturn(
+ new ConcurrentHashMap<ContainerId, Container>());
+
+ ContainersLogsBlock cLogsBlock =
+ new ContainersLogsBlock(conf, context, aclsManager, dirsHandler);
+
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(YarnWebParams.CONTAINER_ID, container1.toString());
+ params.put(YarnWebParams.CONTAINER_LOG_TYPE, "syslog");
+
+ Injector injector =
+ WebAppTests.testPage(ContainerLogsPage.class,
+ ContainersLogsBlock.class, cLogsBlock, params, (Module[])null);
+ PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
+ verify(spyPw).write(
+ "Exception reading log file. Application submitted by '" + user
+ + "' doesn't own requested log file : syslog");
+ } finally {
+ if (syslog != null) {
+ syslog.delete();
+ }
+ if (containerDir != null) {
+ containerDir.delete();
+ }
+ if (appDir != null) {
+ appDir.delete();
+ }
+ if (absLogDir != null) {
+ absLogDir.delete();
+ }
+ }
+ }
}
|
9f9b972f00afb8f851b0e12852d6c86432ca090f
|
spring-framework
|
Prepared for JDK 1.9--Aiming for the JdkVersion class to support one generation ahead now, in order for the test suite to properly detect 1.7/1.8+ JVMs even when running against a JDK 1.9 preview at some point.-
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-core/src/main/java/org/springframework/core/JdkVersion.java b/spring-core/src/main/java/org/springframework/core/JdkVersion.java
index 9204ad184f06..a7c20f32c5d9 100644
--- a/spring-core/src/main/java/org/springframework/core/JdkVersion.java
+++ b/spring-core/src/main/java/org/springframework/core/JdkVersion.java
@@ -59,6 +59,11 @@ public abstract class JdkVersion {
*/
public static final int JAVA_18 = 5;
+ /**
+ * Constant identifying the 1.9 JVM (Java 9).
+ */
+ public static final int JAVA_19 = 5;
+
private static final String javaVersion;
@@ -67,7 +72,10 @@ public abstract class JdkVersion {
static {
javaVersion = System.getProperty("java.version");
// version String should look like "1.4.2_10"
- if (javaVersion.contains("1.8.")) {
+ if (javaVersion.contains("1.9.")) {
+ majorJavaVersion = JAVA_19;
+ }
+ else if (javaVersion.contains("1.8.")) {
majorJavaVersion = JAVA_18;
}
else if (javaVersion.contains("1.7.")) {
|
cd2f14637bcecec0081104d749cd1bf10f28b07d
|
ReactiveX-RxJava
|
Fixed issue -799 - Added break to possibly-infinite- loop in CompositeException.attachCallingThreadStack--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/util/CompositeException.java b/rxjava-core/src/main/java/rx/util/CompositeException.java
index bca5dcfbf7..439b9400b2 100644
--- a/rxjava-core/src/main/java/rx/util/CompositeException.java
+++ b/rxjava-core/src/main/java/rx/util/CompositeException.java
@@ -18,7 +18,9 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
/**
* Exception that is a composite of 1 or more other exceptions.
@@ -84,9 +86,16 @@ private static String getStackTraceAsString(StackTraceElement[] stack) {
return s.toString();
}
- private static void attachCallingThreadStack(Throwable e, Throwable cause) {
+ /* package-private */ static void attachCallingThreadStack(Throwable e, Throwable cause) {
+ Set<Throwable> seenCauses = new HashSet<Throwable>();
+
while (e.getCause() != null) {
e = e.getCause();
+ if (seenCauses.contains(e.getCause())) {
+ break;
+ } else {
+ seenCauses.add(e.getCause());
+ }
}
// we now have 'e' as the last in the chain
try {
@@ -98,12 +107,13 @@ private static void attachCallingThreadStack(Throwable e, Throwable cause) {
}
}
- private final static class CompositeExceptionCausalChain extends RuntimeException {
+ /* package-private */ final static class CompositeExceptionCausalChain extends RuntimeException {
private static final long serialVersionUID = 3875212506787802066L;
+ /* package-private */ static String MESSAGE = "Chain of Causes for CompositeException In Order Received =>";
@Override
public String getMessage() {
- return "Chain of Causes for CompositeException In Order Received =>";
+ return MESSAGE;
}
}
diff --git a/rxjava-core/src/test/java/rx/util/CompositeExceptionTest.java b/rxjava-core/src/test/java/rx/util/CompositeExceptionTest.java
new file mode 100644
index 0000000000..0e80cf0309
--- /dev/null
+++ b/rxjava-core/src/test/java/rx/util/CompositeExceptionTest.java
@@ -0,0 +1,70 @@
+/**
+ * Copyright 2013 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package rx.util;
+
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class CompositeExceptionTest {
+
+ private final Throwable ex1 = new Throwable("Ex1");
+ private final Throwable ex2 = new Throwable("Ex2", ex1);
+ private final Throwable ex3 = new Throwable("Ex3", ex2);
+
+ private final CompositeException compositeEx;
+
+ public CompositeExceptionTest() {
+ List<Throwable> throwables = new ArrayList<Throwable>();
+ throwables.add(ex1);
+ throwables.add(ex2);
+ throwables.add(ex3);
+ compositeEx = new CompositeException(throwables);
+ }
+
+ @Test
+ public void testAttachCallingThreadStackParentThenChild() {
+ CompositeException.attachCallingThreadStack(ex1, ex2);
+ assertEquals("Ex2", ex1.getCause().getMessage());
+ }
+
+ @Test
+ public void testAttachCallingThreadStackChildThenParent() {
+ CompositeException.attachCallingThreadStack(ex2, ex1);
+ assertEquals("Ex1", ex2.getCause().getMessage());
+ }
+
+ @Test
+ public void testAttachCallingThreadStackAddComposite() {
+ CompositeException.attachCallingThreadStack(ex1, compositeEx);
+ assertEquals("Ex2", ex1.getCause().getMessage());
+ }
+
+ @Test
+ public void testAttachCallingThreadStackAddToComposite() {
+ CompositeException.attachCallingThreadStack(compositeEx, ex1);
+ assertEquals(CompositeException.CompositeExceptionCausalChain.MESSAGE, compositeEx.getCause().getMessage());
+ }
+
+ @Test
+ public void testAttachCallingThreadStackAddCompositeToItself() {
+ CompositeException.attachCallingThreadStack(compositeEx, compositeEx);
+ assertEquals(CompositeException.CompositeExceptionCausalChain.MESSAGE, compositeEx.getCause().getMessage());
+ }
+}
\ No newline at end of file
|
12954a69895db8e60522c72557b93c53973dc436
|
ReactiveX-RxJava
|
Extract UnsubscribeTester to top level--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/subjects/RepeatSubject.java b/rxjava-core/src/main/java/rx/subjects/RepeatSubject.java
index a5f1470f6b..1d0eb6f145 100644
--- a/rxjava-core/src/main/java/rx/subjects/RepeatSubject.java
+++ b/rxjava-core/src/main/java/rx/subjects/RepeatSubject.java
@@ -2,10 +2,10 @@
import org.junit.Test;
import org.mockito.Mockito;
-import rx.Observable;
import rx.Observer;
import rx.Subscription;
import rx.subscriptions.Subscriptions;
+import rx.testing.UnsubscribeTester;
import rx.util.functions.Func1;
import java.util.ArrayList;
@@ -14,7 +14,6 @@
import java.util.List;
import java.util.Map;
-import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
@@ -270,8 +269,8 @@ private void assertObservedUntilTwo(Observer<String> aObserver)
public void testUnsubscribeFromOnNext() {
RepeatSubject<Object> subject = RepeatSubject.create();
- UnsubscribeTest test1 = UnsubscribeTest.createOnNext(subject);
- UnsubscribeTest test2 = UnsubscribeTest.createOnNext(subject);
+ UnsubscribeTester test1 = UnsubscribeTester.createOnNext(subject);
+ UnsubscribeTester test2 = UnsubscribeTester.createOnNext(subject);
subject.onNext("one");
@@ -283,8 +282,8 @@ public void testUnsubscribeFromOnNext() {
public void testUnsubscribeFromOnCompleted() {
RepeatSubject<Object> subject = RepeatSubject.create();
- UnsubscribeTest test1 = UnsubscribeTest.createOnCompleted(subject);
- UnsubscribeTest test2 = UnsubscribeTest.createOnCompleted(subject);
+ UnsubscribeTester test1 = UnsubscribeTester.createOnCompleted(subject);
+ UnsubscribeTester test2 = UnsubscribeTester.createOnCompleted(subject);
subject.onCompleted();
@@ -296,8 +295,8 @@ public void testUnsubscribeFromOnCompleted() {
public void testUnsubscribeFromOnError() {
RepeatSubject<Object> subject = RepeatSubject.create();
- UnsubscribeTest test1 = UnsubscribeTest.createOnError(subject);
- UnsubscribeTest test2 = UnsubscribeTest.createOnError(subject);
+ UnsubscribeTester test1 = UnsubscribeTester.createOnError(subject);
+ UnsubscribeTester test2 = UnsubscribeTester.createOnError(subject);
subject.onError(new Exception());
@@ -305,100 +304,5 @@ public void testUnsubscribeFromOnError() {
test2.assertPassed();
}
- private static class UnsubscribeTest
- {
- private Subscription subscription;
-
- private UnsubscribeTest() {}
-
- public static <T> UnsubscribeTest createOnNext(Observable<T> observable)
- {
- final UnsubscribeTest test = new UnsubscribeTest();
- test.setSubscription(observable.subscribe(new Observer<T>()
- {
- @Override
- public void onCompleted()
- {
- }
-
- @Override
- public void onError(Exception e)
- {
- }
-
- @Override
- public void onNext(T args)
- {
- test.doUnsubscribe();
- }
- }));
- return test;
- }
-
- public static <T> UnsubscribeTest createOnCompleted(Observable<T> observable)
- {
- final UnsubscribeTest test = new UnsubscribeTest();
- test.setSubscription(observable.subscribe(new Observer<T>()
- {
- @Override
- public void onCompleted()
- {
- test.doUnsubscribe();
- }
-
- @Override
- public void onError(Exception e)
- {
- }
-
- @Override
- public void onNext(T args)
- {
- }
- }));
- return test;
- }
-
- public static <T> UnsubscribeTest createOnError(Observable<T> observable)
- {
- final UnsubscribeTest test = new UnsubscribeTest();
- test.setSubscription(observable.subscribe(new Observer<T>()
- {
- @Override
- public void onCompleted()
- {
- }
-
- @Override
- public void onError(Exception e)
- {
- test.doUnsubscribe();
- }
-
- @Override
- public void onNext(T args)
- {
- }
- }));
- return test;
- }
-
- private void setSubscription(Subscription subscription)
- {
- this.subscription = subscription;
- }
-
- private void doUnsubscribe()
- {
- Subscription subscription = this.subscription;
- this.subscription = null;
- subscription.unsubscribe();
- }
-
- public void assertPassed()
- {
- assertTrue("expected notification was received", subscription == null);
- }
- }
}
}
diff --git a/rxjava-core/src/main/java/rx/testing/UnsubscribeTester.java b/rxjava-core/src/main/java/rx/testing/UnsubscribeTester.java
new file mode 100644
index 0000000000..e1988c9093
--- /dev/null
+++ b/rxjava-core/src/main/java/rx/testing/UnsubscribeTester.java
@@ -0,0 +1,103 @@
+package rx.testing;
+
+import rx.Observable;
+import rx.Observer;
+import rx.Subscription;
+
+import static org.junit.Assert.assertTrue;
+
+public class UnsubscribeTester
+{
+ private Subscription subscription;
+
+ public UnsubscribeTester() {}
+
+ public static <T> UnsubscribeTester createOnNext(Observable<T> observable)
+ {
+ final UnsubscribeTester test = new UnsubscribeTester();
+ test.setSubscription(observable.subscribe(new Observer<T>()
+ {
+ @Override
+ public void onCompleted()
+ {
+ }
+
+ @Override
+ public void onError(Exception e)
+ {
+ }
+
+ @Override
+ public void onNext(T args)
+ {
+ test.doUnsubscribe();
+ }
+ }));
+ return test;
+ }
+
+ public static <T> UnsubscribeTester createOnCompleted(Observable<T> observable)
+ {
+ final UnsubscribeTester test = new UnsubscribeTester();
+ test.setSubscription(observable.subscribe(new Observer<T>()
+ {
+ @Override
+ public void onCompleted()
+ {
+ test.doUnsubscribe();
+ }
+
+ @Override
+ public void onError(Exception e)
+ {
+ }
+
+ @Override
+ public void onNext(T args)
+ {
+ }
+ }));
+ return test;
+ }
+
+ public static <T> UnsubscribeTester createOnError(Observable<T> observable)
+ {
+ final UnsubscribeTester test = new UnsubscribeTester();
+ test.setSubscription(observable.subscribe(new Observer<T>()
+ {
+ @Override
+ public void onCompleted()
+ {
+ }
+
+ @Override
+ public void onError(Exception e)
+ {
+ test.doUnsubscribe();
+ }
+
+ @Override
+ public void onNext(T args)
+ {
+ }
+ }));
+ return test;
+ }
+
+ private void setSubscription(Subscription subscription)
+ {
+ this.subscription = subscription;
+ }
+
+ private void doUnsubscribe()
+ {
+ Subscription subscription = this.subscription;
+ this.subscription = null;
+ subscription.unsubscribe();
+ }
+
+ public void assertPassed()
+ {
+ assertTrue("expected notification was received", subscription == null);
+ }
+}
|
61b7b2defd9eccd914bde6b94c9fa978579eb4b3
|
restlet-framework-java
|
- Added Metadate-getParent() method. - Added- better Variant-toString() implementation.--
|
p
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/build/tmpl/text/changes.txt b/build/tmpl/text/changes.txt
index b5660106d4..547df6c108 100644
--- a/build/tmpl/text/changes.txt
+++ b/build/tmpl/text/changes.txt
@@ -85,7 +85,9 @@ Changes log
authenticated requests.
- Deprecated Request#setChallengeRequest().
Use getChallengeRequests() instead.
- - Misc
+ - Added Metadate#getParent() method.
+ - Added better Variant#toString() implementation.
+ - Misc
- Exceptions thrown in the internal HTTP client are not logged
with a FINE level instead of a WARNING level.
- Upgraded Simple to version 4.1.12.
diff --git a/modules/org.restlet.ext.atom/META-INF/MANIFEST.MF b/modules/org.restlet.ext.atom/META-INF/MANIFEST.MF
index 03aa1ab260..f1d44385f4 100644
--- a/modules/org.restlet.ext.atom/META-INF/MANIFEST.MF
+++ b/modules/org.restlet.ext.atom/META-INF/MANIFEST.MF
@@ -12,6 +12,7 @@ Export-Package: org.restlet.ext.atom;
Import-Package: org.restlet,
org.restlet.data,
org.restlet.engine.converter,
+ org.restlet.engine.resource,
org.restlet.engine.util,
org.restlet.ext.xml,
org.restlet.representation,
diff --git a/modules/org.restlet.ext.gwt/META-INF/MANIFEST.MF b/modules/org.restlet.ext.gwt/META-INF/MANIFEST.MF
index 433663e416..0b8dd77928 100644
--- a/modules/org.restlet.ext.gwt/META-INF/MANIFEST.MF
+++ b/modules/org.restlet.ext.gwt/META-INF/MANIFEST.MF
@@ -14,6 +14,7 @@ Import-Package: com.google.gwt.user.client.rpc,
org.restlet.data,
org.restlet.engine,
org.restlet.engine.converter,
+ org.restlet.engine.resource,
org.restlet.ext.servlet,
org.restlet.representation,
org.restlet.resource
diff --git a/modules/org.restlet.ext.json/META-INF/MANIFEST.MF b/modules/org.restlet.ext.json/META-INF/MANIFEST.MF
index 5f775e03aa..6ed70941ad 100644
--- a/modules/org.restlet.ext.json/META-INF/MANIFEST.MF
+++ b/modules/org.restlet.ext.json/META-INF/MANIFEST.MF
@@ -9,6 +9,7 @@ Import-Package: org.json,
org.restlet,
org.restlet.data,
org.restlet.engine.converter,
+ org.restlet.engine.resource,
org.restlet.representation,
org.restlet.resource,
org.restlet.service,
diff --git a/modules/org.restlet.ext.xml/META-INF/MANIFEST.MF b/modules/org.restlet.ext.xml/META-INF/MANIFEST.MF
index 607ee77743..96fdd5cda5 100644
--- a/modules/org.restlet.ext.xml/META-INF/MANIFEST.MF
+++ b/modules/org.restlet.ext.xml/META-INF/MANIFEST.MF
@@ -12,6 +12,7 @@ Export-Package: org.restlet.ext.xml;
Import-Package: org.restlet,
org.restlet.data,
org.restlet.engine.converter,
+ org.restlet.engine.resource,
org.restlet.engine.util,
org.restlet.representation,
org.restlet.resource,
diff --git a/modules/org.restlet.ext.xstream/META-INF/MANIFEST.MF b/modules/org.restlet.ext.xstream/META-INF/MANIFEST.MF
index 9eaa9ba68e..d540b0cdf2 100644
--- a/modules/org.restlet.ext.xstream/META-INF/MANIFEST.MF
+++ b/modules/org.restlet.ext.xstream/META-INF/MANIFEST.MF
@@ -22,6 +22,7 @@ Import-Package:
org.restlet.data,
org.restlet.engine,
org.restlet.engine.converter,
+ org.restlet.engine.resource,
org.restlet.representation,
org.restlet.resource,
org.restlet.service,
diff --git a/modules/org.restlet/src/org/restlet/data/CharacterSet.java b/modules/org.restlet/src/org/restlet/data/CharacterSet.java
index 93de913438..5412c55c2e 100644
--- a/modules/org.restlet/src/org/restlet/data/CharacterSet.java
+++ b/modules/org.restlet/src/org/restlet/data/CharacterSet.java
@@ -153,6 +153,11 @@ && getName()
.equalsIgnoreCase(((CharacterSet) object).getName());
}
+ @Override
+ public Metadata getParent() {
+ return null;
+ }
+
/** {@inheritDoc} */
@Override
public int hashCode() {
diff --git a/modules/org.restlet/src/org/restlet/data/Encoding.java b/modules/org.restlet/src/org/restlet/data/Encoding.java
index 4039651525..96ba08e630 100644
--- a/modules/org.restlet/src/org/restlet/data/Encoding.java
+++ b/modules/org.restlet/src/org/restlet/data/Encoding.java
@@ -131,6 +131,11 @@ public boolean equals(final Object object) {
&& getName().equalsIgnoreCase(((Encoding) object).getName());
}
+ @Override
+ public Metadata getParent() {
+ return null;
+ }
+
/** {@inheritDoc} */
@Override
public int hashCode() {
diff --git a/modules/org.restlet/src/org/restlet/data/Language.java b/modules/org.restlet/src/org/restlet/data/Language.java
index 298a94150c..ed1e1d0492 100644
--- a/modules/org.restlet/src/org/restlet/data/Language.java
+++ b/modules/org.restlet/src/org/restlet/data/Language.java
@@ -129,6 +129,11 @@ public boolean equals(final Object object) {
&& getName().equalsIgnoreCase(((Language) object).getName());
}
+ @Override
+ public Language getParent() {
+ return Language.valueOf(getPrimaryTag());
+ }
+
/**
* Returns the primary tag.
*
diff --git a/modules/org.restlet/src/org/restlet/data/MediaType.java b/modules/org.restlet/src/org/restlet/data/MediaType.java
index 5e4c56c54b..6bcdb7e08c 100644
--- a/modules/org.restlet/src/org/restlet/data/MediaType.java
+++ b/modules/org.restlet/src/org/restlet/data/MediaType.java
@@ -881,6 +881,11 @@ public Series<Parameter> getParameters() {
return p;
}
+ @Override
+ public MediaType getParent() {
+ return MediaType.valueOf(getMainType() + "/*");
+ }
+
/**
* Returns the sub-type.
*
diff --git a/modules/org.restlet/src/org/restlet/data/Metadata.java b/modules/org.restlet/src/org/restlet/data/Metadata.java
index 0ef5b9f9ad..9d7bb3bdff 100644
--- a/modules/org.restlet/src/org/restlet/data/Metadata.java
+++ b/modules/org.restlet/src/org/restlet/data/Metadata.java
@@ -45,13 +45,20 @@
* >Source dissertation</a>
* @author Jerome Louvel
*/
-public class Metadata {
+public abstract class Metadata {
/** The metadata name like "text/html" or "compress" or "iso-8851-1". */
private final String name;
/** The description of this metadata. */
private final String description;
+ /**
+ * Returns the parent metadata if available or null.
+ *
+ * @return The parent metadata.
+ */
+ public abstract Metadata getParent();
+
/**
* Constructor.
*
diff --git a/modules/org.restlet/src/org/restlet/representation/Variant.java b/modules/org.restlet/src/org/restlet/representation/Variant.java
index 25540d78c3..45505b1b62 100644
--- a/modules/org.restlet/src/org/restlet/representation/Variant.java
+++ b/modules/org.restlet/src/org/restlet/representation/Variant.java
@@ -69,6 +69,59 @@ public class Variant {
/** The media type. */
private volatile MediaType mediaType;
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("[");
+ boolean first = true;
+
+ if (getIdentifier() != null) {
+ sb.append(getIdentifier());
+
+ if (first) {
+ first = false;
+ } else {
+ sb.append(",");
+ }
+ }
+
+ if (getMediaType() != null) {
+ sb.append(getMediaType());
+
+ if (first) {
+ first = false;
+ } else {
+ sb.append(",");
+ }
+ }
+
+ if (getCharacterSet() != null) {
+ sb.append(getCharacterSet());
+
+ if (first) {
+ first = false;
+ } else {
+ sb.append(",");
+ }
+ }
+
+ if (!getLanguages().isEmpty()) {
+ sb.append(getLanguages());
+
+ if (first) {
+ first = false;
+ } else {
+ sb.append(",");
+ }
+ }
+
+ if (!getEncodings().isEmpty()) {
+ sb.append(getEncodings());
+ }
+
+ sb.append("]");
+ return sb.toString();
+ }
+
/**
* Default constructor.
*/
|
7d26cffb0cb21c16b335e8ae9b02523565ad3b5d
|
hadoop
|
YARN-2819. NPE in ATS Timeline Domains when- upgrading from 2.4 to 2.6. Contributed by Zhijie Shen--(cherry picked from commit 4a114dd67aae83e5bb2d65470166de954acf36a2)-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index f3c9d4e207307..bb94797db81cd 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -884,6 +884,9 @@ Release 2.6.0 - UNRELEASED
YARN-2825. Container leak on NM (Jian He via jlowe)
+ YARN-2819. NPE in ATS Timeline Domains when upgrading from 2.4 to 2.6.
+ (Zhijie Shen via xgong)
+
Release 2.5.2 - UNRELEASED
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
index e1f790d9da768..c4ea9960ad739 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
@@ -792,7 +792,8 @@ private TimelineEntities getEntityByTime(byte[] base,
* Put a single entity. If there is an error, add a TimelinePutError to the
* given response.
*/
- private void put(TimelineEntity entity, TimelinePutResponse response) {
+ private void put(TimelineEntity entity, TimelinePutResponse response,
+ boolean allowEmptyDomainId) {
LockMap.CountingReentrantLock<EntityIdentifier> lock =
writeLocks.getLock(new EntityIdentifier(entity.getEntityId(),
entity.getEntityType()));
@@ -867,10 +868,18 @@ private void put(TimelineEntity entity, TimelinePutResponse response) {
new EntityIdentifier(relatedEntityId, relatedEntityType));
continue;
} else {
+ // This is the existing entity
byte[] domainIdBytes = db.get(createDomainIdKey(
relatedEntityId, relatedEntityType, relatedEntityStartTime));
- // This is the existing entity
- String domainId = new String(domainIdBytes);
+ // The timeline data created by the server before 2.6 won't have
+ // the domain field. We assume this timeline data is in the
+ // default timeline domain.
+ String domainId = null;
+ if (domainIdBytes == null) {
+ domainId = TimelineDataManager.DEFAULT_DOMAIN_ID;
+ } else {
+ domainId = new String(domainIdBytes);
+ }
if (!domainId.equals(entity.getDomainId())) {
// in this case the entity will be put, but the relation will be
// ignored
@@ -923,12 +932,14 @@ private void put(TimelineEntity entity, TimelinePutResponse response) {
entity.getEntityType(), revStartTime);
if (entity.getDomainId() == null ||
entity.getDomainId().length() == 0) {
- TimelinePutError error = new TimelinePutError();
- error.setEntityId(entity.getEntityId());
- error.setEntityType(entity.getEntityType());
- error.setErrorCode(TimelinePutError.NO_DOMAIN);
- response.addError(error);
- return;
+ if (!allowEmptyDomainId) {
+ TimelinePutError error = new TimelinePutError();
+ error.setEntityId(entity.getEntityId());
+ error.setEntityType(entity.getEntityType());
+ error.setErrorCode(TimelinePutError.NO_DOMAIN);
+ response.addError(error);
+ return;
+ }
} else {
writeBatch.put(key, entity.getDomainId().getBytes());
writePrimaryFilterEntries(writeBatch, primaryFilters, key,
@@ -1011,7 +1022,22 @@ public TimelinePutResponse put(TimelineEntities entities) {
deleteLock.readLock().lock();
TimelinePutResponse response = new TimelinePutResponse();
for (TimelineEntity entity : entities.getEntities()) {
- put(entity, response);
+ put(entity, response, false);
+ }
+ return response;
+ } finally {
+ deleteLock.readLock().unlock();
+ }
+ }
+
+ @Private
+ @VisibleForTesting
+ public TimelinePutResponse putWithNoDomainId(TimelineEntities entities) {
+ try {
+ deleteLock.readLock().lock();
+ TimelinePutResponse response = new TimelinePutResponse();
+ for (TimelineEntity entity : entities.getEntities()) {
+ put(entity, response, true);
}
return response;
} finally {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineDataManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineDataManager.java
index 7ef0a67dac81f..888c28311579e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineDataManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/TimelineDataManager.java
@@ -124,6 +124,7 @@ public TimelineEntities getEntities(
entities.getEntities().iterator();
while (entitiesItr.hasNext()) {
TimelineEntity entity = entitiesItr.next();
+ addDefaultDomainIdIfAbsent(entity);
try {
// check ACLs
if (!timelineACLsManager.checkAccess(
@@ -161,6 +162,7 @@ public TimelineEntity getEntity(
entity =
store.getEntity(entityId, entityType, fields);
if (entity != null) {
+ addDefaultDomainIdIfAbsent(entity);
// check ACLs
if (!timelineACLsManager.checkAccess(
callerUGI, ApplicationAccessType.VIEW_APP, entity)) {
@@ -203,6 +205,7 @@ public TimelineEvents getEvents(
eventsOfOneEntity.getEntityId(),
eventsOfOneEntity.getEntityType(),
EnumSet.of(Field.PRIMARY_FILTERS));
+ addDefaultDomainIdIfAbsent(entity);
// check ACLs
if (!timelineACLsManager.checkAccess(
callerUGI, ApplicationAccessType.VIEW_APP, entity)) {
@@ -254,10 +257,12 @@ public TimelinePutResponse postEntities(
existingEntity =
store.getEntity(entityID.getId(), entityID.getType(),
EnumSet.of(Field.PRIMARY_FILTERS));
- if (existingEntity != null &&
- !existingEntity.getDomainId().equals(entity.getDomainId())) {
- throw new YarnException("The domain of the timeline entity "
- + entityID + " is not allowed to be changed.");
+ if (existingEntity != null) {
+ addDefaultDomainIdIfAbsent(existingEntity);
+ if (!existingEntity.getDomainId().equals(entity.getDomainId())) {
+ throw new YarnException("The domain of the timeline entity "
+ + entityID + " is not allowed to be changed.");
+ }
}
if (!timelineACLsManager.checkAccess(
callerUGI, ApplicationAccessType.MODIFY_APP, entity)) {
@@ -355,4 +360,11 @@ public TimelineDomains getDomains(String owner,
}
}
+ private static void addDefaultDomainIdIfAbsent(TimelineEntity entity) {
+ // be compatible with the timeline data created before 2.6
+ if (entity.getDomainId() == null) {
+ entity.setDomainId(DEFAULT_DOMAIN_ID);
+ }
+ }
+
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java
index f315930812782..5ebc96b627b5c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLeveldbTimelineStore.java
@@ -40,6 +40,7 @@
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.records.Version;
import org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore;
@@ -160,12 +161,13 @@ private boolean deleteNextEntity(String entityType, byte[] ts)
@Test
public void testGetEntityTypes() throws IOException {
List<String> entityTypes = ((LeveldbTimelineStore)store).getEntityTypes();
- assertEquals(5, entityTypes.size());
- assertEquals(entityType1, entityTypes.get(0));
- assertEquals(entityType2, entityTypes.get(1));
- assertEquals(entityType4, entityTypes.get(2));
- assertEquals(entityType5, entityTypes.get(3));
- assertEquals(entityType7, entityTypes.get(4));
+ assertEquals(6, entityTypes.size());
+ assertEquals("OLD_ENTITY_TYPE_1", entityTypes.get(0));
+ assertEquals(entityType1, entityTypes.get(1));
+ assertEquals(entityType2, entityTypes.get(2));
+ assertEquals(entityType4, entityTypes.get(3));
+ assertEquals(entityType5, entityTypes.get(4));
+ assertEquals(entityType7, entityTypes.get(5));
}
@Test
@@ -196,7 +198,7 @@ public void testDeleteEntities() throws IOException, InterruptedException {
((LeveldbTimelineStore)store).discardOldEntities(-123l);
assertEquals(2, getEntities("type_1").size());
assertEquals(0, getEntities("type_2").size());
- assertEquals(4, ((LeveldbTimelineStore)store).getEntityTypes().size());
+ assertEquals(5, ((LeveldbTimelineStore)store).getEntityTypes().size());
((LeveldbTimelineStore)store).discardOldEntities(123l);
assertEquals(0, getEntities("type_1").size());
@@ -327,4 +329,69 @@ public void testGetDomains() throws IOException {
super.testGetDomains();
}
+ @Test
+ public void testRelatingToNonExistingEntity() throws IOException {
+ TimelineEntity entityToStore = new TimelineEntity();
+ entityToStore.setEntityType("TEST_ENTITY_TYPE_1");
+ entityToStore.setEntityId("TEST_ENTITY_ID_1");
+ entityToStore.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);
+ entityToStore.addRelatedEntity("TEST_ENTITY_TYPE_2", "TEST_ENTITY_ID_2");
+ TimelineEntities entities = new TimelineEntities();
+ entities.addEntity(entityToStore);
+ store.put(entities);
+ TimelineEntity entityToGet =
+ store.getEntity("TEST_ENTITY_ID_2", "TEST_ENTITY_TYPE_2", null);
+ Assert.assertNotNull(entityToGet);
+ Assert.assertEquals("DEFAULT", entityToGet.getDomainId());
+ Assert.assertEquals("TEST_ENTITY_TYPE_1",
+ entityToGet.getRelatedEntities().keySet().iterator().next());
+ Assert.assertEquals("TEST_ENTITY_ID_1",
+ entityToGet.getRelatedEntities().values().iterator().next()
+ .iterator().next());
+ }
+
+ @Test
+ public void testRelatingToOldEntityWithoutDomainId() throws IOException {
+ // New entity is put in the default domain
+ TimelineEntity entityToStore = new TimelineEntity();
+ entityToStore.setEntityType("NEW_ENTITY_TYPE_1");
+ entityToStore.setEntityId("NEW_ENTITY_ID_1");
+ entityToStore.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);
+ entityToStore.addRelatedEntity("OLD_ENTITY_TYPE_1", "OLD_ENTITY_ID_1");
+ TimelineEntities entities = new TimelineEntities();
+ entities.addEntity(entityToStore);
+ store.put(entities);
+
+ TimelineEntity entityToGet =
+ store.getEntity("OLD_ENTITY_ID_1", "OLD_ENTITY_TYPE_1", null);
+ Assert.assertNotNull(entityToGet);
+ Assert.assertNull(entityToGet.getDomainId());
+ Assert.assertEquals("NEW_ENTITY_TYPE_1",
+ entityToGet.getRelatedEntities().keySet().iterator().next());
+ Assert.assertEquals("NEW_ENTITY_ID_1",
+ entityToGet.getRelatedEntities().values().iterator().next()
+ .iterator().next());
+
+ // New entity is not put in the default domain
+ entityToStore = new TimelineEntity();
+ entityToStore.setEntityType("NEW_ENTITY_TYPE_2");
+ entityToStore.setEntityId("NEW_ENTITY_ID_2");
+ entityToStore.setDomainId("NON_DEFAULT");
+ entityToStore.addRelatedEntity("OLD_ENTITY_TYPE_1", "OLD_ENTITY_ID_1");
+ entities = new TimelineEntities();
+ entities.addEntity(entityToStore);
+ TimelinePutResponse response = store.put(entities);
+ Assert.assertEquals(1, response.getErrors().size());
+ Assert.assertEquals(TimelinePutError.FORBIDDEN_RELATION,
+ response.getErrors().get(0).getErrorCode());
+ entityToGet =
+ store.getEntity("OLD_ENTITY_ID_1", "OLD_ENTITY_TYPE_1", null);
+ Assert.assertNotNull(entityToGet);
+ Assert.assertNull(entityToGet.getDomainId());
+ // Still have one related entity
+ Assert.assertEquals(1, entityToGet.getRelatedEntities().keySet().size());
+ Assert.assertEquals(1, entityToGet.getRelatedEntities().values()
+ .iterator().next().size());
+ }
+
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestTimelineDataManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestTimelineDataManager.java
new file mode 100644
index 0000000000000..f74956735a34b
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TestTimelineDataManager.java
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timeline;
+
+import java.io.File;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+
+public class TestTimelineDataManager extends TimelineStoreTestUtils {
+
+ private FileContext fsContext;
+ private File fsPath;
+ private TimelineDataManager dataManaer;
+
+ @Before
+ public void setup() throws Exception {
+ fsPath = new File("target", this.getClass().getSimpleName() +
+ "-tmpDir").getAbsoluteFile();
+ fsContext = FileContext.getLocalFSFileContext();
+ fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
+ Configuration conf = new YarnConfiguration();
+ conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH,
+ fsPath.getAbsolutePath());
+ conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, false);
+ store = new LeveldbTimelineStore();
+ store.init(conf);
+ store.start();
+ loadTestEntityData();
+ loadVerificationEntityData();
+ loadTestDomainData();
+
+ TimelineACLsManager aclsManager = new TimelineACLsManager(conf);
+ dataManaer = new TimelineDataManager(store, aclsManager);
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ if (store != null) {
+ store.stop();
+ }
+ if (fsContext != null) {
+ fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
+ }
+ }
+
+ @Test
+ public void testGetOldEntityWithOutDomainId() throws Exception {
+ TimelineEntity entity = dataManaer.getEntity(
+ "OLD_ENTITY_TYPE_1", "OLD_ENTITY_ID_1", null,
+ UserGroupInformation.getCurrentUser());
+ Assert.assertNotNull(entity);
+ Assert.assertEquals("OLD_ENTITY_ID_1", entity.getEntityId());
+ Assert.assertEquals("OLD_ENTITY_TYPE_1", entity.getEntityType());
+ Assert.assertEquals(
+ TimelineDataManager.DEFAULT_DOMAIN_ID, entity.getDomainId());
+ }
+
+ @Test
+ public void testGetOldEntitiesWithOutDomainId() throws Exception {
+ TimelineEntities entities = dataManaer.getEntities(
+ "OLD_ENTITY_TYPE_1", null, null, null, null, null, null, null, null,
+ UserGroupInformation.getCurrentUser());
+ Assert.assertEquals(2, entities.getEntities().size());
+ Assert.assertEquals("OLD_ENTITY_ID_2",
+ entities.getEntities().get(0).getEntityId());
+ Assert.assertEquals("OLD_ENTITY_TYPE_1",
+ entities.getEntities().get(0).getEntityType());
+ Assert.assertEquals(TimelineDataManager.DEFAULT_DOMAIN_ID,
+ entities.getEntities().get(0).getDomainId());
+ Assert.assertEquals("OLD_ENTITY_ID_1",
+ entities.getEntities().get(1).getEntityId());
+ Assert.assertEquals("OLD_ENTITY_TYPE_1",
+ entities.getEntities().get(1).getEntityType());
+ Assert.assertEquals(TimelineDataManager.DEFAULT_DOMAIN_ID,
+ entities.getEntities().get(1).getDomainId());
+ }
+
+ @Test
+ public void testUpdatingOldEntityWithoutDomainId() throws Exception {
+ // Set the domain to the default domain when updating
+ TimelineEntity entity = new TimelineEntity();
+ entity.setEntityType("OLD_ENTITY_TYPE_1");
+ entity.setEntityId("OLD_ENTITY_ID_1");
+ entity.setDomainId(TimelineDataManager.DEFAULT_DOMAIN_ID);
+ entity.addOtherInfo("NEW_OTHER_INFO_KEY", "NEW_OTHER_INFO_VALUE");
+ TimelineEntities entities = new TimelineEntities();
+ entities.addEntity(entity);
+ TimelinePutResponse response = dataManaer.postEntities(
+ entities, UserGroupInformation.getCurrentUser());
+ Assert.assertEquals(0, response.getErrors().size());
+ entity = store.getEntity("OLD_ENTITY_ID_1", "OLD_ENTITY_TYPE_1", null);
+ Assert.assertNotNull(entity);
+ // Even in leveldb, the domain is updated to the default domain Id
+ Assert.assertEquals(
+ TimelineDataManager.DEFAULT_DOMAIN_ID, entity.getDomainId());
+ Assert.assertEquals(1, entity.getOtherInfo().size());
+ Assert.assertEquals("NEW_OTHER_INFO_KEY",
+ entity.getOtherInfo().keySet().iterator().next());
+ Assert.assertEquals("NEW_OTHER_INFO_VALUE",
+ entity.getOtherInfo().values().iterator().next());
+
+ // Set the domain to the non-default domain when updating
+ entity = new TimelineEntity();
+ entity.setEntityType("OLD_ENTITY_TYPE_1");
+ entity.setEntityId("OLD_ENTITY_ID_2");
+ entity.setDomainId("NON_DEFAULT");
+ entity.addOtherInfo("NEW_OTHER_INFO_KEY", "NEW_OTHER_INFO_VALUE");
+ entities = new TimelineEntities();
+ entities.addEntity(entity);
+ response = dataManaer.postEntities(
+ entities, UserGroupInformation.getCurrentUser());
+ Assert.assertEquals(1, response.getErrors().size());
+ Assert.assertEquals(TimelinePutResponse.TimelinePutError.ACCESS_DENIED,
+ response.getErrors().get(0).getErrorCode());
+ entity = store.getEntity("OLD_ENTITY_ID_2", "OLD_ENTITY_TYPE_1", null);
+ Assert.assertNotNull(entity);
+ // In leveldb, the domain Id is still null
+ Assert.assertNull(entity.getDomainId());
+ // Updating is not executed
+ Assert.assertEquals(0, entity.getOtherInfo().size());
+ }
+
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java
index 242478cafa983..6f15b9245b8d5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/TimelineStoreTestUtils.java
@@ -210,6 +210,18 @@ protected void loadTestEntityData() throws IOException {
assertEquals(entityId7, response.getErrors().get(0).getEntityId());
assertEquals(TimelinePutError.FORBIDDEN_RELATION,
response.getErrors().get(0).getErrorCode());
+
+ if (store instanceof LeveldbTimelineStore) {
+ LeveldbTimelineStore leveldb = (LeveldbTimelineStore) store;
+ entities.setEntities(Collections.singletonList(createEntity(
+ "OLD_ENTITY_ID_1", "OLD_ENTITY_TYPE_1", 63l, null, null, null, null,
+ null)));
+ leveldb.putWithNoDomainId(entities);
+ entities.setEntities(Collections.singletonList(createEntity(
+ "OLD_ENTITY_ID_2", "OLD_ENTITY_TYPE_1", 64l, null, null, null, null,
+ null)));
+ leveldb.putWithNoDomainId(entities);
+ }
}
/**
|
d9699e02f4e28c75cc5ffdd125b11d99325766c4
|
elasticsearch
|
Changed GeoEncodingTests to ensure accuracy- always >1mm due to rounding errors with very small numbers--
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/test/java/org/elasticsearch/index/mapper/geo/GeoEncodingTests.java b/src/test/java/org/elasticsearch/index/mapper/geo/GeoEncodingTests.java
index f02e15d83370f..f52363e89ae4e 100644
--- a/src/test/java/org/elasticsearch/index/mapper/geo/GeoEncodingTests.java
+++ b/src/test/java/org/elasticsearch/index/mapper/geo/GeoEncodingTests.java
@@ -36,7 +36,7 @@ public void test() {
for (int i = 0; i < 10000; ++i) {
final double lat = randomDouble() * 180 - 90;
final double lon = randomDouble() * 360 - 180;
- final Distance precision = new Distance(randomDouble() * 10, randomFrom(Arrays.asList(DistanceUnit.MILLIMETERS, DistanceUnit.METERS, DistanceUnit.KILOMETERS)));
+ final Distance precision = new Distance(1+(randomDouble() * 9), randomFrom(Arrays.asList(DistanceUnit.MILLIMETERS, DistanceUnit.METERS, DistanceUnit.KILOMETERS)));
final GeoPointFieldMapper.Encoding encoding = GeoPointFieldMapper.Encoding.of(precision);
assertThat(encoding.precision().convert(DistanceUnit.METERS).value, lessThanOrEqualTo(precision.convert(DistanceUnit.METERS).value));
final GeoPoint geoPoint = encoding.decode(encoding.encodeCoordinate(lat), encoding.encodeCoordinate(lon), new GeoPoint());
|
5f2ee6ded78a158ec352a376b7d6ee5381e70599
|
drools
|
JBRULES-233 for leaps--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@4214 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java
index 6471a4b1fd2..82960c9491d 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/LeapsTest.java
@@ -162,9 +162,4 @@ public void testXorGroups() throws Exception {
assertTrue( "rule2",
list.contains( "rule2" ) );
}
-
- public void testLogicalAssertionsDynamicRule() throws Exception {
- // TODO FIXME
- }
-
}
diff --git a/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java b/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
index d561015325a..23a3167ef02 100644
--- a/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
+++ b/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
@@ -111,7 +111,9 @@ abstract public class AbstractWorkingMemory implements WorkingMemory,
protected long propagationIdCounter;
private ReentrantLock lock = new ReentrantLock( );
-
+
+ private List factQueue = new ArrayList( );
+
public AbstractWorkingMemory(RuleBase ruleBase,
FactHandleFactory handleFactory) {
this.ruleBase = ruleBase;
@@ -377,8 +379,52 @@ public PrimitiveLongMap getJustified() {
return this.justified;
}
+ public long getNextPropagationIdCounter() {
+ return this.propagationIdCounter++;
+ }
+
abstract public void dispose();
+ public void removeLogicalDependencies(Activation activation,
+ PropagationContext context,
+ Rule rule) throws FactException {
+ org.drools.util.LinkedList list = activation.getLogicalDependencies();
+ if ( list == null || list.isEmpty() ) {
+ return;
+ }
+ for ( LogicalDependency node = (LogicalDependency) list.getFirst(); node != null; node = (LogicalDependency) node.getNext() ) {
+ InternalFactHandle handle = (InternalFactHandle) node.getFactHandle();
+ Set set = (Set) this.justified.get( handle.getId( ) );
+ // check set for null because in some weird cases on logical assertion
+ // it comes back with the same activation/handle and tries on
+ // already cleaned this.justified. only happens on removal of rule
+ // from the working memory
+ if (set != null) {
+ set.remove( node );
+ if (set.isEmpty( )) {
+ this.justified.remove( handle.getId( ) );
+ // this needs to be scheduled so we don't upset the current
+ // working memory operation
+ this.factQueue.add( new WorkingMemoryRetractAction( handle,
+ false,
+ true,
+ context.getRuleOrigin( ),
+ context.getActivationOrigin( ) ) );
+ }
+ }
+ }
+ }
+
+ public void removeLogicalDependencies(FactHandle handle) throws FactException {
+ Set set = (Set) this.justified.remove( ((InternalFactHandle) handle).getId() );
+ if ( set != null && !set.isEmpty() ) {
+ for ( Iterator it = set.iterator(); it.hasNext(); ) {
+ LogicalDependency node = (LogicalDependency) it.next();
+ node.getJustifier().getLogicalDependencies().remove( node );
+ }
+ }
+ }
+
public void addLogicalDependency(FactHandle handle,
Activation activation,
PropagationContext context,
@@ -395,34 +441,12 @@ public void addLogicalDependency(FactHandle handle,
set.add( node );
}
- public void removeLogicalDependencies( Activation activation,
- PropagationContext context,
- Rule rule ) throws FactException {
- org.drools.util.LinkedList list = activation.getLogicalDependencies();
- if (list == null || list.isEmpty( )) {
- return;
- }
- for (LogicalDependency node = (LogicalDependency) list.getFirst( ); node != null; node = (LogicalDependency) node.getNext( )) {
- InternalFactHandle handle = (InternalFactHandle) node.getFactHandle( );
- Set set = (Set) this.justified.get( handle.getId( ) );
- set.remove( node );
- if (set.isEmpty( )) {
- this.justified.remove( handle.getId( ) );
- retractObject( handle,
- false,
- true,
- context.getRuleOrigin( ),
- context.getActivationOrigin( ) );
- }
- }
- }
-
- public void removeLogicalDependencies(FactHandle handle) throws FactException {
- Set set = (Set) this.justified.remove( ((InternalFactHandle) handle).getId() );
- if ( set != null && !set.isEmpty() ) {
- for ( Iterator it = set.iterator(); it.hasNext(); ) {
- LogicalDependency node = (LogicalDependency) it.next();
- node.getJustifier().getLogicalDependencies().remove( node );
+ protected void propagateQueuedActions() {
+ if (!this.factQueue.isEmpty( )) {
+ for (Iterator it = this.factQueue.iterator( ); it.hasNext( );) {
+ WorkingMemoryAction action = (WorkingMemoryAction) it.next( );
+ it.remove( );
+ action.propagate( );
}
}
}
@@ -431,6 +455,41 @@ public Lock getLock() {
return this.lock;
}
+ private interface WorkingMemoryAction {
+ public void propagate();
+ }
+
+ private class WorkingMemoryRetractAction implements WorkingMemoryAction {
+ private InternalFactHandle factHandle;
+ private boolean removeLogical;
+ private boolean updateEqualsMap;
+ private Rule ruleOrigin;
+ private Activation activationOrigin;
+
+
+
+ public WorkingMemoryRetractAction(InternalFactHandle factHandle,
+ boolean removeLogical,
+ boolean updateEqualsMap,
+ Rule ruleOrigin,
+ Activation activationOrigin) {
+ super();
+ this.factHandle = factHandle;
+ this.removeLogical = removeLogical;
+ this.updateEqualsMap = updateEqualsMap;
+ this.ruleOrigin = ruleOrigin;
+ this.activationOrigin = activationOrigin;
+ }
+
+ public void propagate() {
+ retractObject( this.factHandle,
+ this.removeLogical,
+ this.updateEqualsMap,
+ this.ruleOrigin,
+ this.activationOrigin );
+ }
+ }
+
protected static class FactStatus {
private int counter;
private String status;
diff --git a/drools-core/src/main/java/org/drools/leaps/FactTable.java b/drools-core/src/main/java/org/drools/leaps/FactTable.java
index 9da995d6a05..f57f08a5922 100644
--- a/drools-core/src/main/java/org/drools/leaps/FactTable.java
+++ b/drools-core/src/main/java/org/drools/leaps/FactTable.java
@@ -46,7 +46,7 @@ class FactTable extends Table {
* Tuples that are either already on agenda or are very close (missing
* exists or have not facts matching)
*/
- private final LinkedList tuples;
+ private LinkedList tuples;
/**
* initializes base LeapsTable with appropriate Comparator and positive and
@@ -67,9 +67,8 @@ public FactTable(ConflictResolver conflictResolver) {
* @param workingMemory
* @param ruleHandle
*/
- public void addRule(WorkingMemoryImpl workingMemory,
- RuleHandle ruleHandle) {
- if ( !this.rules.contains( ruleHandle ) ) {
+ public void addRule( WorkingMemoryImpl workingMemory, RuleHandle ruleHandle ) {
+ if (!this.rules.contains( ruleHandle )) {
this.rules.add( ruleHandle );
// push facts back to stack if needed
this.checkAndAddFactsToStack( workingMemory );
@@ -81,8 +80,18 @@ public void addRule(WorkingMemoryImpl workingMemory,
*
* @param ruleHandle
*/
- public void removeRule(RuleHandle ruleHandle) {
+ public void removeRule( RuleHandle ruleHandle ) {
this.rules.remove( ruleHandle );
+ // remove tuples that are still there
+ LinkedList list = new LinkedList( );
+
+ for (Iterator it = this.getTuplesIterator( ); it.hasNext( );) {
+ LeapsTuple tuple = (LeapsTuple) it.next( );
+ if (ruleHandle.getLeapsRule( ).getRule( ) != tuple.getLeapsRule( ).getRule( )) {
+ list.add( tuple );
+ }
+ }
+ this.tuples = list;
}
/**
diff --git a/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java b/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java
index f0e796876df..8a12a47303d 100644
--- a/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java
+++ b/drools-core/src/main/java/org/drools/leaps/WorkingMemoryImpl.java
@@ -74,6 +74,8 @@ class WorkingMemoryImpl extends AbstractWorkingMemory
private final IdentityMap leapsRulesToHandlesMap = new IdentityMap( );
+ private final IdentityMap rulesActivationsMap = new IdentityMap( );
+
/**
* Construct.
*
@@ -214,11 +216,11 @@ public FactHandle assertObject( Object object,
boolean logical,
Rule rule,
Activation activation ) throws FactException {
-
+ FactHandleImpl handle ;
this.getLock().lock( );
try {
// check if the object already exists in the WM
- FactHandleImpl handle = (FactHandleImpl) this.identityMap.get( object );
+ handle = (FactHandleImpl) this.identityMap.get( object );
// lets see if the object is already logical asserted
FactStatus logicalState = (FactStatus) this.equalsMap.get( object );
@@ -237,6 +239,7 @@ public FactHandle assertObject( Object object,
activation,
activation.getPropagationContext( ),
rule );
+
return logicalState.getHandle( );
}
@@ -294,7 +297,6 @@ public FactHandle assertObject( Object object,
activation,
activation.getPropagationContext( ),
rule );
-
}
// new leaps stack token
@@ -380,12 +382,13 @@ public FactHandle assertObject( Object object,
}
}
}
-
- return handle;
+ propagateQueuedActions( );
}
finally {
- this.getLock().unlock( );
+ this.getLock( ).unlock( );
}
+
+ return handle;
}
/**
@@ -555,6 +558,8 @@ public void retractObject(FactHandle handle,
activation );
this.workingMemoryEventSupport.fireObjectRetracted( context, handle, oldObject );
+
+ propagateQueuedActions();
}
finally {
this.getLock().unlock( );
@@ -586,14 +591,37 @@ private final void invalidateActivation( LeapsTuple tuple ) {
}
}
+
+
+ public void addLogicalDependency( FactHandle handle,
+ Activation activation,
+ PropagationContext context,
+ Rule rule ) throws FactException {
+ super.addLogicalDependency( handle, activation, context, rule );
+
+ LinkedList activations = (LinkedList) this.rulesActivationsMap.get( rule );
+ if (activations == null) {
+ activations = new LinkedList( );
+ this.rulesActivationsMap.put( rule, activations );
+ }
+ activations.add( activation );
+ }
+
+
+ public void removeLogicalDependencies( Activation activation,
+ PropagationContext context,
+ Rule rule ) throws FactException {
+ super.removeLogicalDependencies( activation, context, rule );
+ }
+
/**
* @see WorkingMemory
*/
- public void modifyObject(FactHandle handle,
+ public void modifyObject( FactHandle handle,
Object object,
Rule rule,
Activation activation ) throws FactException {
- this.getLock().lock( );
+ this.getLock( ).lock( );
try {
this.retractObject( handle );
@@ -624,9 +652,10 @@ public void modifyObject(FactHandle handle,
handle,
( (FactHandleImpl) handle ).getObject( ),
object );
+ propagateQueuedActions( );
}
finally {
- this.getLock().unlock( );
+ this.getLock( ).unlock( );
}
}
@@ -778,22 +807,36 @@ protected void removeRule( List rules ) {
this.getLock( ).lock( );
try {
ArrayList ruleHandlesList;
- LeapsRule rule;
+ LeapsRule leapsRule;
RuleHandle ruleHandle;
for (Iterator it = rules.iterator( ); it.hasNext( );) {
- rule = (LeapsRule) it.next( );
+ leapsRule = (LeapsRule) it.next( );
// some times rules do not have "normal" constraints and only
// not and exists
- if (rule.getNumberOfColumns( ) > 0) {
- ruleHandlesList = (ArrayList) this.leapsRulesToHandlesMap.remove( rule );
+ if (leapsRule.getNumberOfColumns( ) > 0) {
+ ruleHandlesList = (ArrayList) this.leapsRulesToHandlesMap.remove( leapsRule );
for (int i = 0; i < ruleHandlesList.size( ); i++) {
ruleHandle = (RuleHandle) ruleHandlesList.get( i );
//
- this.getFactTable( rule.getColumnClassObjectTypeAtPosition( i ) )
+ this.getFactTable( leapsRule.getColumnClassObjectTypeAtPosition( i ) )
.removeRule( ruleHandle );
}
}
+ //
+ }
+ Rule rule = ((LeapsRule)rules.get(0)).getRule( );
+ List activations = (List) this.rulesActivationsMap.remove( rule );
+ if (activations != null) {
+ for (Iterator activationsIt = activations.iterator( ); activationsIt.hasNext( );) {
+ Activation activation = (Activation) activationsIt.next( );
+ ((LeapsTuple)activation.getTuple()).setActivation(null);
+ this.removeLogicalDependencies( activation,
+ activation.getPropagationContext( ),
+ rule );
+ }
}
+
+ propagateQueuedActions();
}
finally {
this.getLock( ).unlock( );
|
2ca1f557dc3220a047c5ee6a1d8c1a0584f6ca4e
|
orientdb
|
Fix by Luca Molino to close issue 619--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/commons/src/main/java/com/orientechnologies/common/console/TTYConsoleReader.java b/commons/src/main/java/com/orientechnologies/common/console/TTYConsoleReader.java
index 59339861f7e..1701bf54d79 100644
--- a/commons/src/main/java/com/orientechnologies/common/console/TTYConsoleReader.java
+++ b/commons/src/main/java/com/orientechnologies/common/console/TTYConsoleReader.java
@@ -70,7 +70,7 @@ public TTYConsoleReader() {
}
if (System.getProperty("file.encoding") != null) {
inStream = new InputStreamReader(System.in, System.getProperty("file.encoding"));
- outStream = new PrintStream(System.out, true, System.getProperty("file.encoding"));
+ outStream = new PrintStream(System.out, false, System.getProperty("file.encoding"));
} else {
inStream = new InputStreamReader(System.in);
outStream = System.out;
@@ -93,7 +93,6 @@ public String readLine() {
int historyNum = history.size();
boolean hintedHistory = false;
while (true) {
-
boolean escape = false;
boolean ctrl = false;
int next = inStream.read();
@@ -260,7 +259,7 @@ public String readLine() {
rewriteConsole(buffer, false);
currentPos = buffer.length();
} else {
- if (next > UNIT_SEPARATOR_CHAR && next < BACKSPACE_CHAR) {
+ if ((next > UNIT_SEPARATOR_CHAR && next < BACKSPACE_CHAR) || next > BACKSPACE_CHAR) {
StringBuffer cleaner = new StringBuffer();
for (int i = 0; i < buffer.length(); i++) {
cleaner.append(" ");
|
df7bf90803a686946e09d9ec175f8ab85277e989
|
ReactiveX-RxJava
|
GroupBy Test Improvement--ObserveOn was the wrong mechanism for delaying behavior as it was relying on the buffering of observeOn.-Now using delay() to delay the group since observeOn no longer buffers.-
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/test/java/rx/operators/OperatorGroupByTest.java b/rxjava-core/src/test/java/rx/operators/OperatorGroupByTest.java
index c808e396bb..92b085ea0f 100644
--- a/rxjava-core/src/test/java/rx/operators/OperatorGroupByTest.java
+++ b/rxjava-core/src/test/java/rx/operators/OperatorGroupByTest.java
@@ -484,21 +484,16 @@ public Integer call(Integer i) {
@Override
public Observable<Integer> call(GroupedObservable<Integer, Integer> group) {
if (group.getKey() == 0) {
- return group.observeOn(Schedulers.newThread()).map(new Func1<Integer, Integer>() {
+ return group.delay(100, TimeUnit.MILLISECONDS).map(new Func1<Integer, Integer>() {
@Override
public Integer call(Integer t) {
- try {
- Thread.sleep(2);
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
return t * 10;
}
});
} else {
- return group.observeOn(Schedulers.newThread());
+ return group;
}
}
})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.