commit_id
stringlengths 40
40
| project
stringclasses 11
values | commit_message
stringlengths 3
3.04k
| type
stringclasses 3
values | url
stringclasses 11
values | git_diff
stringlengths 555
691k
|
|---|---|---|---|---|---|
d0ea47d3ef16eb831ed535658508c5abf346bac8
|
orientdb
|
fixed collection fields in query results, issue - -3212--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/ORuntimeResult.java b/core/src/main/java/com/orientechnologies/orient/core/sql/ORuntimeResult.java
index 71debfae21f..6ad19f7893e 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/ORuntimeResult.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/ORuntimeResult.java
@@ -19,6 +19,14 @@
*/
package com.orientechnologies.orient.core.sql;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
import com.orientechnologies.common.util.OResettable;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
@@ -33,12 +41,6 @@
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemVariable;
import com.orientechnologies.orient.core.sql.functions.OSQLFunctionRuntime;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
/**
* Handles runtime results.
*
@@ -66,6 +68,7 @@ public static ODocument createProjectionDocument(final int iProgressive) {
return doc;
}
+ @SuppressWarnings("unchecked")
public static ODocument applyRecord(final ODocument iValue, final Map<String, Object> iProjections,
final OCommandContext iContext, final OIdentifiable iRecord) {
// APPLY PROJECTIONS
@@ -105,6 +108,7 @@ else if (projectionValue instanceof OIdentifiable && !(projectionValue instanceo
&& !(projectionValue instanceof ORecord))
iValue.field(projection.getKey(), ((OIdentifiable) projectionValue).getRecord());
else if (projectionValue instanceof Iterator) {
+ boolean link = true;
// make temporary value typical case graph database elemenet's iterator edges
if (projectionValue instanceof OResettable)
((OResettable) projectionValue).reset();
@@ -113,16 +117,35 @@ else if (projectionValue instanceof Iterator) {
final Iterator projectionValueIterator = (Iterator) projectionValue;
while (projectionValueIterator.hasNext()) {
Object value = projectionValueIterator.next();
- if (value instanceof OIdentifiable)
+ if (value instanceof OIdentifiable) {
value = ((OIdentifiable) value).getRecord();
+ if (!((OIdentifiable) value).getIdentity().isPersistent())
+ link = false;
+ }
if (value != null)
iteratorValues.add(value);
}
- iValue.field(projection.getKey(), iteratorValues);
+ iValue.field(projection.getKey(), iteratorValues, link ? OType.LINKLIST : OType.EMBEDDEDLIST);
} else if (projectionValue instanceof ODocument && !((ODocument) projectionValue).getIdentity().isPersistent()) {
iValue.field(projection.getKey(), projectionValue, OType.EMBEDDED);
+ } else if (projectionValue instanceof Set<?>) {
+ OType type = OType.getTypeByValue(projectionValue);
+ if (type == OType.LINKSET && !entriesPersistent((Collection<OIdentifiable>) projectionValue))
+ type = OType.EMBEDDEDSET;
+ iValue.field(projection.getKey(), projectionValue, type);
+ } else if (projectionValue instanceof Map<?, ?>) {
+ OType type = OType.getTypeByValue(projectionValue);
+ if (type == OType.LINKMAP && !entriesPersistent(((Map<?, OIdentifiable>) projectionValue).values()))
+ type = OType.EMBEDDEDMAP;
+ iValue.field(projection.getKey(), projectionValue, type);
+ } else if (projectionValue instanceof List<?>) {
+ OType type = OType.getTypeByValue(projectionValue);
+ if (type == OType.LINKLIST && !entriesPersistent((Collection<OIdentifiable>) projectionValue))
+ type = OType.EMBEDDEDLIST;
+ iValue.field(projection.getKey(), projectionValue, type);
+
} else
iValue.field(projection.getKey(), projectionValue);
@@ -132,6 +155,14 @@ else if (projectionValue instanceof Iterator) {
return iValue;
}
+ private static boolean entriesPersistent(Collection<OIdentifiable> projectionValue) {
+ for (OIdentifiable rec : projectionValue) {
+ if (!rec.getIdentity().isPersistent())
+ return false;
+ }
+ return true;
+ }
+
public static ODocument getResult(final ODocument iValue, final Map<String, Object> iProjections) {
if (iValue != null) {
diff --git a/core/src/test/java/com/orientechnologies/orient/core/sql/select/TestSqlEmbeddedResult.java b/core/src/test/java/com/orientechnologies/orient/core/sql/select/TestSqlEmbeddedResult.java
index 91acf4bd64f..9bca80b8703 100644
--- a/core/src/test/java/com/orientechnologies/orient/core/sql/select/TestSqlEmbeddedResult.java
+++ b/core/src/test/java/com/orientechnologies/orient/core/sql/select/TestSqlEmbeddedResult.java
@@ -1,6 +1,5 @@
package com.orientechnologies.orient.core.sql.select;
-import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -12,6 +11,8 @@
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
+import com.orientechnologies.orient.core.serialization.serializer.ONetworkThreadLocalSerializer;
+import com.orientechnologies.orient.core.serialization.serializer.record.binary.ORecordSerializerBinary;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
public class TestSqlEmbeddedResult {
@@ -34,11 +35,25 @@ public void testEmbeddedRusultTypeNotLink() {
Assert.assertEquals(res.size(), 1);
ODocument ele = res.get(0);
Assert.assertNotNull(ele.field("el"));
+ ONetworkThreadLocalSerializer.setNetworkSerializer(new ORecordSerializerBinary());
byte [] bt = ele.toStream();
ODocument read = new ODocument(bt);
Assert.assertNotNull(read.field("el"));
Assert.assertEquals(read.fieldType("el"), OType.EMBEDDED);
+
+ ONetworkThreadLocalSerializer.setNetworkSerializer(null);
+ res = db.query(new OSQLSynchQuery<Object>("select $Pics as el FROM Test LET $Pics = (select expand( rel.include('format')) from $current)"));
+
+ ONetworkThreadLocalSerializer.setNetworkSerializer(new ORecordSerializerBinary());
+ Assert.assertEquals(res.size(), 1);
+ ele = res.get(0);
+ Assert.assertNotNull(ele.field("el"));
+ bt = ele.toStream();
+ read = new ODocument(bt);
+ Assert.assertNotNull(read.field("el"));
+ Assert.assertEquals(read.fieldType("el"), OType.EMBEDDEDLIST);
+ ONetworkThreadLocalSerializer.setNetworkSerializer(null);
db.drop();
}
}
|
710ae3a9d2fbcb6767872656d82b2edaeb6e0656
|
spring-framework
|
SpringJUnit4ClassRunnerAppCtxTests now verifies- seamless support for using @Inject in addition to @Autowired, etc.--
|
a
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.test/.classpath b/org.springframework.test/.classpath
index d35f01db07af..5e5a2e70be5f 100644
--- a/org.springframework.test/.classpath
+++ b/org.springframework.test/.classpath
@@ -15,6 +15,7 @@
<classpathentry combineaccessrules="false" kind="src" path="/org.springframework.web.portlet"/>
<classpathentry combineaccessrules="false" kind="src" path="/org.springframework.web.servlet"/>
<classpathentry kind="var" path="IVY_CACHE/javax.activation/com.springsource.javax.activation/1.1.0/com.springsource.javax.activation-1.1.0.jar" sourcepath="/IVY_CACHE/javax.activation/com.springsource.javax.activation/1.1.0/com.springsource.javax.activation-sources-1.1.0.jar"/>
+ <classpathentry kind="var" path="IVY_CACHE/javax.inject/com.springsource.javax.inject/0.9.0.PFD/com.springsource.javax.inject-0.9.0.PFD.jar" sourcepath="/IVY_CACHE/javax.inject/com.springsource.javax.inject/0.9.0.PFD/com.springsource.javax.inject-sources-0.9.0.PFD.jar"/>
<classpathentry kind="var" path="IVY_CACHE/javax.persistence/com.springsource.javax.persistence/1.0.0/com.springsource.javax.persistence-1.0.0.jar" sourcepath="/IVY_CACHE/javax.persistence/com.springsource.javax.persistence/1.0.0/com.springsource.javax.persistence-sources-1.0.0.jar"/>
<classpathentry kind="var" path="IVY_CACHE/javax.portlet/com.springsource.javax.portlet/2.0.0/com.springsource.javax.portlet-2.0.0.jar"/>
<classpathentry kind="var" path="IVY_CACHE/javax.servlet/com.springsource.javax.servlet/2.5.0/com.springsource.javax.servlet-2.5.0.jar" sourcepath="/IVY_CACHE/javax.servlet/com.springsource.javax.servlet/2.5.0/com.springsource.javax.servlet-sources-2.5.0.jar"/>
diff --git a/org.springframework.test/ivy.xml b/org.springframework.test/ivy.xml
index e64c205bb575..b70e01038eed 100644
--- a/org.springframework.test/ivy.xml
+++ b/org.springframework.test/ivy.xml
@@ -21,6 +21,7 @@
<dependencies>
<dependency org="javax.activation" name="com.springsource.javax.activation" rev="1.1.0" conf="provided->compile"/>
<dependency org="javax.el" name="com.springsource.javax.el" rev="1.0.0" conf="provided->compile"/>
+ <dependency org="javax.inject" name="com.springsource.javax.inject" rev="0.9.0.PFD" conf="test->compile"/>
<dependency org="javax.persistence" name="com.springsource.javax.persistence" rev="1.0.0" conf="provided->compile"/>
<dependency org="javax.portlet" name="com.springsource.javax.portlet" rev="2.0.0" conf="provided->compile"/>
<dependency org="javax.servlet" name="com.springsource.javax.servlet" rev="2.5.0" conf="provided->compile"/>
diff --git a/org.springframework.test/pom.xml b/org.springframework.test/pom.xml
index 3518579246f8..332939c8b645 100644
--- a/org.springframework.test/pom.xml
+++ b/org.springframework.test/pom.xml
@@ -25,6 +25,12 @@
<version>1.0</version>
<scope>provided</scope>
</dependency>
+ <dependency>
+ <groupId>javax.inject</groupId>
+ <artifactId>com.springsource.javax.inject</artifactId>
+ <version>0.9.0.PFD</version>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>javax.persistence</groupId>
<artifactId>persistence-api</artifactId>
diff --git a/org.springframework.test/src/test/java/org/springframework/test/context/junit4/SpringJUnit4ClassRunnerAppCtxTests.java b/org.springframework.test/src/test/java/org/springframework/test/context/junit4/SpringJUnit4ClassRunnerAppCtxTests.java
index 91b51242bdfc..e23164bcf1f2 100644
--- a/org.springframework.test/src/test/java/org/springframework/test/context/junit4/SpringJUnit4ClassRunnerAppCtxTests.java
+++ b/org.springframework.test/src/test/java/org/springframework/test/context/junit4/SpringJUnit4ClassRunnerAppCtxTests.java
@@ -19,9 +19,11 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import javax.annotation.Resource;
+import javax.inject.Inject;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -48,6 +50,7 @@
* <ul>
* <li>{@link ContextConfiguration @ContextConfiguration}</li>
* <li>{@link Autowired @Autowired}</li>
+ * <li>{@link Inject @Inject}</li>
* <li>{@link Qualifier @Qualifier}</li>
* <li>{@link Resource @Resource}</li>
* <li>{@link ApplicationContextAware}</li>
@@ -59,10 +62,12 @@
* {@link ContextConfiguration#locations() locations} are explicitly declared
* and since the {@link ContextConfiguration#loader() ContextLoader} is left set
* to the default value of {@link GenericXmlContextLoader}, this test class's
- * dependencies will be injected via {@link Autowired @Autowired} and
- * {@link Resource @Resource} from beans defined in the
- * {@link ApplicationContext} loaded from the default classpath resource:
- * <code>"/org/springframework/test/context/junit/SpringJUnit4ClassRunnerAppCtxTests-context.xml"</code>.
+ * dependencies will be injected via {@link Autowired @Autowired},
+ * {@link Inject @Inject}, and {@link Resource @Resource} from beans defined in
+ * the {@link ApplicationContext} loaded from the default classpath resource:
+ *
+ * <code>"/org/springframework/test/context/junit/SpringJUnit4ClassRunnerAppCtxTests-context.xml"</code>
+ * .
* </p>
*
* @author Sam Brannen
@@ -93,12 +98,15 @@ public class SpringJUnit4ClassRunnerAppCtxTests implements ApplicationContextAwa
private Employee employee;
@Autowired
- private Pet pet;
+ private Pet autowiredPet;
+
+ @Inject
+ private Pet injectedPet;
@Autowired(required = false)
protected Long nonrequiredLong;
- @Resource()
+ @Resource
protected String foo;
protected String bar;
@@ -153,11 +161,14 @@ public final void verifyBeanNameSet() {
}
@Test
- public final void verifyAnnotationAutowiredFields() {
+ public final void verifyAnnotationAutowiredAndInjectedFields() {
assertNull("The nonrequiredLong field should NOT have been autowired.", this.nonrequiredLong);
assertEquals("The quux field should have been autowired via @Autowired and @Qualifier.", "Quux", this.quux);
- assertNotNull("The pet field should have been autowired.", this.pet);
- assertEquals("Fido", this.pet.getName());
+ assertNotNull("The pet field should have been autowired.", this.autowiredPet);
+ assertNotNull("The pet field should have been injected.", this.injectedPet);
+ assertEquals("Fido", this.autowiredPet.getName());
+ assertEquals("Fido", this.injectedPet.getName());
+ assertSame("@Autowired and @Inject pet should be the same object.", this.autowiredPet, this.injectedPet);
}
@Test
@@ -176,4 +187,4 @@ public final void verifyResourceAnnotationWiredMethods() {
assertEquals("The bar method should have been wired via @Resource.", "Bar", this.bar);
}
-}
+}
\ No newline at end of file
diff --git a/org.springframework.test/test.iml b/org.springframework.test/test.iml
index a5a65c18b9ae..924d7b776d9d 100644
--- a/org.springframework.test/test.iml
+++ b/org.springframework.test/test.iml
@@ -24,6 +24,7 @@
<orderEntry type="library" name="Commons Logging" level="project" />
<orderEntry type="library" name="EasyMock" level="project" />
<orderEntry type="library" name="javax.el" level="project" />
+ <orderEntry type="library" name="javax.inject" level="project" />
<orderEntry type="library" name="JUnit" level="project" />
<orderEntry type="module-library">
<library>
|
3086b191dda16c22e7a909f131296f6c060bc639
|
hbase
|
HBASE-1647 Filter-filterRow is called too often,- filters rows it shouldn't have -- reversed it for a moment; it may have- broken things -- not sure yet--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@798510 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 58065a2b05d7..564b5dce52fd 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -295,8 +295,6 @@ Release 0.20.0 - Unreleased
(Tim Sell and Ryan Rawson via Stack)
HBASE-1703 ICVs across /during a flush can cause multiple keys with the
same TS (bad)
- HBASE-1647 Filter#filterRow is called too often, filters rows it
- shouldn't have (Doğacan Güney via Ryan Rawson and Stack)
IMPROVEMENTS
HBASE-1089 Add count of regions on filesystem to master UI; add percentage
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 23c141b92cce..29ea7e5bfd0b 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -53,8 +53,6 @@
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.RowFilterInterface;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.Reference.Range;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
@@ -1683,13 +1681,8 @@ public Path getBaseDir() {
class RegionScanner implements InternalScanner {
private final KeyValueHeap storeHeap;
private final byte [] stopRow;
- private Filter filter;
- private RowFilterInterface oldFilter;
- private List<KeyValue> results = new ArrayList<KeyValue>();
RegionScanner(Scan scan, List<KeyValueScanner> additionalScanners) {
- this.filter = scan.getFilter();
- this.oldFilter = scan.getOldFilter();
if (Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW)) {
this.stopRow = null;
} else {
@@ -1713,74 +1706,46 @@ class RegionScanner implements InternalScanner {
this(scan, null);
}
- private void resetFilters() {
- if (filter != null) {
- filter.reset();
- }
- if (oldFilter != null) {
- oldFilter.reset();
- }
- }
-
/**
* Get the next row of results from this region.
* @param results list to append results to
* @return true if there are more rows, false if scanner is done
*/
- @Override
- public boolean next(List<KeyValue> outResults) throws IOException {
- results.clear();
- boolean returnResult = nextInternal();
- if (!returnResult && filter != null && filter.filterRow()) {
- results.clear();
- }
- outResults.addAll(results);
- resetFilters();
- return returnResult;
- }
-
- private boolean nextInternal() throws IOException {
+ public boolean next(List<KeyValue> results)
+ throws IOException {
// This method should probably be reorganized a bit... has gotten messy
- KeyValue kv;
- byte[] currentRow = null;
- boolean filterCurrentRow = false;
+ KeyValue kv = this.storeHeap.peek();
+ if (kv == null) {
+ return false;
+ }
+ byte [] currentRow = kv.getRow();
+ // See if we passed stopRow
+ if (stopRow != null &&
+ comparator.compareRows(stopRow, 0, stopRow.length,
+ currentRow, 0, currentRow.length) <= 0) {
+ return false;
+ }
+ this.storeHeap.next(results);
while(true) {
kv = this.storeHeap.peek();
if (kv == null) {
return false;
}
byte [] row = kv.getRow();
- if (filterCurrentRow && Bytes.equals(currentRow, row)) {
- // filter all columns until row changes
- this.storeHeap.next(results);
- results.clear();
- continue;
- }
- // see if current row should be filtered based on row key
- if ((filter != null && filter.filterRowKey(row, 0, row.length)) ||
- (oldFilter != null && oldFilter.filterRowKey(row, 0, row.length))) {
- this.storeHeap.next(results);
- results.clear();
- resetFilters();
- filterCurrentRow = true;
- currentRow = row;
- continue;
- }
if(!Bytes.equals(currentRow, row)) {
- // Continue on the next row:
- currentRow = row;
- filterCurrentRow = false;
- // See if we passed stopRow
- if(stopRow != null &&
- comparator.compareRows(stopRow, 0, stopRow.length,
- currentRow, 0, currentRow.length) <= 0) {
- return false;
- }
- // if there are _no_ results or current row should be filtered
- if (results.isEmpty() || filter != null && filter.filterRow()) {
- // make sure results is empty
- results.clear();
- resetFilters();
+ // Next row:
+
+ // what happens if there are _no_ results:
+ if (results.isEmpty()) {
+ // Continue on the next row:
+ currentRow = row;
+
+ // But did we pass the stop row?
+ if (stopRow != null &&
+ comparator.compareRows(stopRow, 0, stopRow.length,
+ currentRow, 0, currentRow.length) <= 0) {
+ return false;
+ }
continue;
}
return true;
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/QueryMatcher.java b/src/java/org/apache/hadoop/hbase/regionserver/QueryMatcher.java
index a0ba3369ba44..9e9295de9621 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/QueryMatcher.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/QueryMatcher.java
@@ -325,6 +325,7 @@ public void update() {
public void reset() {
this.deletes.reset();
this.columns.reset();
+ if (this.filter != null) this.filter.reset();
}
/**
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java b/src/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
index 2ad16708e76c..cb15d317823b 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
@@ -114,6 +114,16 @@ public MatchCode match(KeyValue kv) {
if (this.stickyNextRow)
return MatchCode.SEEK_NEXT_ROW;
+ // Give the row filter a chance to do it's job.
+ if (filter != null && filter.filterRowKey(bytes, offset, rowLength)) {
+ stickyNextRow = true; // optimize to keep from calling the filter too much.
+ return MatchCode.SEEK_NEXT_ROW;
+ } else if (oldFilter != null && oldFilter.filterRowKey(bytes, offset, rowLength)) {
+ stickyNextRow = true;
+ return MatchCode.SEEK_NEXT_ROW;
+ }
+
+
if (this.columns.done()) {
stickyNextRow = true;
return MatchCode.SEEK_NEXT_ROW;
@@ -189,6 +199,16 @@ public MatchCode match(KeyValue kv) {
return MatchCode.SEEK_NEXT_ROW;
}
+ /**
+ * If the row was otherwise going to be included, call this to last-minute
+ * check.
+ *
+ * @return <code>true</code> if the row should be filtered.
+ */
+ public boolean filterEntireRow() {
+ return filter == null? false: filter.filterRow();
+ }
+
/**
* Set current row
* @param row
@@ -203,5 +223,7 @@ public void setRow(byte [] row) {
public void reset() {
super.reset();
stickyNextRow = false;
+ if (filter != null)
+ filter.reset();
}
}
\ No newline at end of file
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/src/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 0b39a9871afa..1c279fccca98 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -162,12 +162,20 @@ public synchronized boolean next(List<KeyValue> outResult) throws IOException {
continue;
case DONE:
+ if (matcher.filterEntireRow()) {
+ // nuke all results, and then return.
+ results.clear();
+ }
// copy jazz
outResult.addAll(results);
return true;
case DONE_SCAN:
+ if (matcher.filterEntireRow()) {
+ // nuke all results, and then return.
+ results.clear();
+ }
close();
// copy jazz
@@ -194,6 +202,11 @@ public synchronized boolean next(List<KeyValue> outResult) throws IOException {
throw new RuntimeException("UNEXPECTED");
}
}
+
+ if (matcher.filterEntireRow()) {
+ // nuke all results, and then return.
+ results.clear();
+ }
if (!results.isEmpty()) {
// copy jazz
diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java b/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
index d93668222296..369b504faec1 100644
--- a/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
+++ b/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
@@ -38,14 +38,6 @@
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
-import org.apache.hadoop.hbase.filter.InclusiveStopRowFilter;
-import org.apache.hadoop.hbase.filter.PrefixFilter;
-import org.apache.hadoop.hbase.filter.PrefixRowFilter;
-import org.apache.hadoop.hbase.filter.RowFilterInterface;
-import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
@@ -117,7 +109,7 @@ public void testStopRow() throws Exception {
count++;
}
s.close();
- assertEquals(0, count);
+ assertEquals(1, count);
// Now do something a bit more imvolved.
scan = new Scan(startrow, stoprow);
scan.addFamily(HConstants.CATALOG_FAMILY);
@@ -144,69 +136,6 @@ public void testStopRow() throws Exception {
shutdownDfs(this.cluster);
}
}
-
- void rowPrefixFilter(Scan scan) throws IOException {
- List<KeyValue> results = new ArrayList<KeyValue>();
- scan.addFamily(HConstants.CATALOG_FAMILY);
- InternalScanner s = r.getScanner(scan);
- boolean hasMore = true;
- while (hasMore) {
- hasMore = s.next(results);
- for (KeyValue kv : results) {
- assertEquals((byte)'a', kv.getRow()[0]);
- assertEquals((byte)'b', kv.getRow()[1]);
- }
- results.clear();
- }
- s.close();
- }
-
- void rowInclusiveStopFilter(Scan scan, byte[] stopRow) throws IOException {
- List<KeyValue> results = new ArrayList<KeyValue>();
- scan.addFamily(HConstants.CATALOG_FAMILY);
- InternalScanner s = r.getScanner(scan);
- boolean hasMore = true;
- while (hasMore) {
- hasMore = s.next(results);
- for (KeyValue kv : results) {
- assertTrue(Bytes.compareTo(kv.getRow(), stopRow) <= 0);
- }
- results.clear();
- }
- s.close();
- }
-
- public void testFilters() throws IOException {
- try {
- this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
- addContent(this.r, HConstants.CATALOG_FAMILY);
- Filter newFilter = new PrefixFilter(Bytes.toBytes("ab"));
- Scan scan = new Scan();
- scan.setFilter(newFilter);
- rowPrefixFilter(scan);
- RowFilterInterface oldFilter = new PrefixRowFilter(Bytes.toBytes("ab"));
- scan = new Scan();
- scan.setOldFilter(oldFilter);
- rowPrefixFilter(scan);
-
- byte[] stopRow = Bytes.toBytes("bbc");
- newFilter = new WhileMatchFilter(new InclusiveStopFilter(stopRow));
- scan = new Scan();
- scan.setFilter(newFilter);
- rowInclusiveStopFilter(scan, stopRow);
-
- oldFilter = new WhileMatchRowFilter(
- new InclusiveStopRowFilter(stopRow));
- scan = new Scan();
- scan.setOldFilter(oldFilter);
- rowInclusiveStopFilter(scan, stopRow);
-
- } finally {
- this.r.close();
- this.r.getLog().closeAndDelete();
- shutdownDfs(this.cluster);
- }
- }
/** The test!
* @throws IOException
@@ -387,6 +316,7 @@ private void scan(boolean validateStartcode, String serverName)
String server = Bytes.toString(val);
assertEquals(0, server.compareTo(serverName));
}
+ results.clear();
}
} finally {
InternalScanner s = scanner;
diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java b/src/test/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
index 8fb2cc18b793..6f611b1f4259 100644
--- a/src/test/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
+++ b/src/test/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
@@ -20,23 +20,25 @@
package org.apache.hadoop.hbase.regionserver;
+import junit.framework.TestCase;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueTestUtil;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.filter.*;
+import org.apache.hadoop.hbase.util.Bytes;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.NavigableSet;
import java.util.TreeSet;
-import junit.framework.TestCase;
-
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueTestUtil;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.util.Bytes;
public class TestStoreScanner extends TestCase {
final byte [] CF = Bytes.toBytes("cf");
-
+
/**
* Test utility for building a NavigableSet for scanners.
* @param strCols
@@ -65,9 +67,9 @@ public void testScanSameTimestamp() throws IOException {
Scan scanSpec = new Scan(Bytes.toBytes("R1"));
// this only uses maxVersions (default=1) and TimeRange (default=all)
StoreScanner scan =
- new StoreScanner(scanSpec, CF, Long.MAX_VALUE,
- KeyValue.COMPARATOR, getCols("a"),
- scanners);
+ new StoreScanner(scanSpec, CF, Long.MAX_VALUE,
+ KeyValue.COMPARATOR, getCols("a"),
+ scanners);
List<KeyValue> results = new ArrayList<KeyValue>();
assertEquals(true, scan.next(results));
@@ -96,9 +98,9 @@ public void testWontNextToNext() throws IOException {
Scan scanSpec = new Scan(Bytes.toBytes("R1"));
// this only uses maxVersions (default=1) and TimeRange (default=all)
StoreScanner scan =
- new StoreScanner(scanSpec, CF, Long.MAX_VALUE,
- KeyValue.COMPARATOR, getCols("a"),
- scanners);
+ new StoreScanner(scanSpec, CF, Long.MAX_VALUE,
+ KeyValue.COMPARATOR, getCols("a"),
+ scanners);
List<KeyValue> results = new ArrayList<KeyValue>();
scan.next(results);
@@ -128,8 +130,8 @@ public void testDeleteVersionSameTimestamp() throws IOException {
};
Scan scanSpec = new Scan(Bytes.toBytes("R1"));
StoreScanner scan =
- new StoreScanner(scanSpec, CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
- getCols("a"), scanners);
+ new StoreScanner(scanSpec, CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
+ getCols("a"), scanners);
List<KeyValue> results = new ArrayList<KeyValue>();
assertFalse(scan.next(results));
@@ -151,9 +153,9 @@ public void testDeletedRowThenGoodRow() throws IOException {
};
Scan scanSpec = new Scan(Bytes.toBytes("R1"));
StoreScanner scan =
- new StoreScanner(scanSpec, CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
- getCols("a"), scanners);
-
+ new StoreScanner(scanSpec, CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
+ getCols("a"), scanners);
+
List<KeyValue> results = new ArrayList<KeyValue>();
assertEquals(true, scan.next(results));
assertEquals(0, results.size());
@@ -181,8 +183,8 @@ public void testDeleteVersionMaskingMultiplePuts() throws IOException {
new KeyValueScanFixture(KeyValue.COMPARATOR, kvs2)
};
StoreScanner scan =
- new StoreScanner(new Scan(Bytes.toBytes("R1")), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
- getCols("a"), scanners);
+ new StoreScanner(new Scan(Bytes.toBytes("R1")), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
+ getCols("a"), scanners);
List<KeyValue> results = new ArrayList<KeyValue>();
// the two put at ts=now will be masked by the 1 delete, and
// since the scan default returns 1 version we'll return the newest
@@ -209,8 +211,8 @@ public void testDeleteVersionsMixedAndMultipleVersionReturn() throws IOException
};
Scan scanSpec = new Scan(Bytes.toBytes("R1")).setMaxVersions(2);
StoreScanner scan =
- new StoreScanner(scanSpec, CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
- getCols("a"), scanners);
+ new StoreScanner(scanSpec, CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
+ getCols("a"), scanners);
List<KeyValue> results = new ArrayList<KeyValue>();
assertEquals(true, scan.next(results));
assertEquals(2, results.size());
@@ -219,17 +221,17 @@ public void testDeleteVersionsMixedAndMultipleVersionReturn() throws IOException
}
public void testWildCardOneVersionScan() throws IOException {
- KeyValue [] kvs = new KeyValue [] {
- KeyValueTestUtil.create("R1", "cf", "a", 2, KeyValue.Type.Put, "dont-care"),
- KeyValueTestUtil.create("R1", "cf", "b", 1, KeyValue.Type.Put, "dont-care"),
- KeyValueTestUtil.create("R1", "cf", "a", 1, KeyValue.Type.DeleteColumn, "dont-care"),
- };
+ KeyValue [] kvs = new KeyValue [] {
+ KeyValueTestUtil.create("R1", "cf", "a", 2, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R1", "cf", "b", 1, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R1", "cf", "a", 1, KeyValue.Type.DeleteColumn, "dont-care"),
+ };
KeyValueScanner [] scanners = new KeyValueScanner[] {
new KeyValueScanFixture(KeyValue.COMPARATOR, kvs)
};
StoreScanner scan =
- new StoreScanner(new Scan(Bytes.toBytes("R1")), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
- null, scanners);
+ new StoreScanner(new Scan(Bytes.toBytes("R1")), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
+ null, scanners);
List<KeyValue> results = new ArrayList<KeyValue>();
assertEquals(true, scan.next(results));
assertEquals(2, results.size());
@@ -259,8 +261,8 @@ public void testWildCardScannerUnderDeletes() throws IOException {
new KeyValueScanFixture(KeyValue.COMPARATOR, kvs)
};
StoreScanner scan =
- new StoreScanner(new Scan().setMaxVersions(2), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
- null, scanners);
+ new StoreScanner(new Scan().setMaxVersions(2), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
+ null, scanners);
List<KeyValue> results = new ArrayList<KeyValue>();
assertEquals(true, scan.next(results));
assertEquals(5, results.size());
@@ -289,8 +291,8 @@ public void testDeleteFamily() throws IOException {
new KeyValueScanFixture(KeyValue.COMPARATOR, kvs)
};
StoreScanner scan =
- new StoreScanner(new Scan().setMaxVersions(Integer.MAX_VALUE), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
- null, scanners);
+ new StoreScanner(new Scan().setMaxVersions(Integer.MAX_VALUE), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
+ null, scanners);
List<KeyValue> results = new ArrayList<KeyValue>();
assertEquals(true, scan.next(results));
assertEquals(0, results.size());
@@ -312,8 +314,8 @@ public void testDeleteColumn() throws IOException {
new KeyValueScanFixture(KeyValue.COMPARATOR, kvs),
};
StoreScanner scan =
- new StoreScanner(new Scan(), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
- null, scanners);
+ new StoreScanner(new Scan(), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
+ null, scanners);
List<KeyValue> results = new ArrayList<KeyValue>();
assertEquals(true, scan.next(results));
assertEquals(1, results.size());
@@ -337,9 +339,9 @@ public void testSkipColumn() throws IOException {
new KeyValueScanFixture(KeyValue.COMPARATOR, kvs)
};
StoreScanner scan =
- new StoreScanner(new Scan(), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
- getCols("a", "d"), scanners);
-
+ new StoreScanner(new Scan(), CF, Long.MAX_VALUE, KeyValue.COMPARATOR,
+ getCols("a", "d"), scanners);
+
List<KeyValue> results = new ArrayList<KeyValue>();
assertEquals(true, scan.next(results));
assertEquals(2, results.size());
@@ -350,8 +352,156 @@ public void testSkipColumn() throws IOException {
assertEquals(true, scan.next(results));
assertEquals(1, results.size());
assertEquals(kvs[kvs.length-1], results.get(0));
-
+
results.clear();
assertEquals(false, scan.next(results));
}
+
+ KeyValue [] stdKvs = new KeyValue[] {
+ KeyValueTestUtil.create("R:1", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:1", "cf", "b", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:1", "cf", "c", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:1", "cf", "d", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:1", "cf", "e", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:1", "cf", "f", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:1", "cf", "g", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:1", "cf", "h", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:1", "cf", "i", 11, KeyValue.Type.Put, "dont-care"),
+
+ // 9...
+ KeyValueTestUtil.create("R:2", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:2", "cf", "c", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:2", "cf", "c", 10, KeyValue.Type.Put, "dont-care"),
+
+ // 12...
+ KeyValueTestUtil.create("R:3", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:3", "cf", "c", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:3", "cf", "c", 10, KeyValue.Type.Put, "dont-care"),
+
+ // 15 ...
+ KeyValueTestUtil.create("R:4", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:4", "cf", "c", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:4", "cf", "c", 10, KeyValue.Type.Put, "dont-care"),
+
+ // 18 ..
+ KeyValueTestUtil.create("R:5", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:5", "cf", "c", 11, KeyValue.Type.Put, "dont-care"),
+
+ // 20...
+ KeyValueTestUtil.create("R:6", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:6", "cf", "c", 11, KeyValue.Type.Put, "dont-care"),
+
+ // 22...
+ KeyValueTestUtil.create("R:7", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:7", "cf", "c", 11, KeyValue.Type.Put, "dont-care"),
+
+ // 24...
+ KeyValueTestUtil.create("R:8", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
+ KeyValueTestUtil.create("R:8", "cf", "c", 11, KeyValue.Type.Put, "dont-care"),
+
+ // 26 ..
+ KeyValueTestUtil.create("RA:1", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
+
+ // 27...
+ KeyValueTestUtil.create("RA:2", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
+
+ // 28..
+ KeyValueTestUtil.create("RA:3", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
+ };
+ private StoreScanner getTestScanner(Scan s, NavigableSet<byte[]> cols) {
+ KeyValueScanner [] scanners = new KeyValueScanner[] {
+ new KeyValueScanFixture(KeyValue.COMPARATOR, stdKvs)
+ };
+
+ return new StoreScanner(s, CF, Long.MAX_VALUE, KeyValue.COMPARATOR, cols,
+ scanners);
+ }
+
+
+ // Test new and old row prefix filters.
+ public void testNewRowPrefixFilter() throws IOException {
+ Filter f = new WhileMatchFilter(
+ new PrefixFilter(Bytes.toBytes("R:")));
+ Scan s = new Scan(Bytes.toBytes("R:7"));
+ s.setFilter(f);
+
+ rowPrefixFilter(s);
+ }
+
+ public void testOldRowPrefixFilter() throws IOException {
+ RowFilterInterface f = new WhileMatchRowFilter(
+ new PrefixRowFilter(Bytes.toBytes("R:")));
+ Scan s = new Scan(Bytes.toBytes("R:7"));
+ s.setOldFilter(f);
+
+ rowPrefixFilter(s);
+
+ }
+ public void rowPrefixFilter(Scan s) throws IOException {
+
+ StoreScanner scan = getTestScanner(s, null);
+
+ List<KeyValue> results = new ArrayList<KeyValue>();
+ assertTrue(scan.next(results));
+ assertEquals(2, results.size());
+ assertEquals(stdKvs[22], results.get(0));
+ assertEquals(stdKvs[23], results.get(1));
+ results.clear();
+
+ assertTrue(scan.next(results));
+ assertEquals(2, results.size());
+ assertEquals(stdKvs[24], results.get(0));
+ assertEquals(stdKvs[25], results.get(1));
+ results.clear();
+
+ assertFalse(scan.next(results));
+ assertEquals(0, results.size());
+ }
+
+ // Test new and old row-inclusive stop filter.
+ public void testNewRowInclusiveStopFilter() throws IOException {
+ Filter f = new WhileMatchFilter(new InclusiveStopFilter(Bytes.toBytes("R:3")));
+ Scan scan = new Scan();
+ scan.setFilter(f);
+
+ rowInclusiveStopFilter(scan);
+ }
+
+ public void testOldRowInclusiveTopFilter() throws IOException {
+ RowFilterInterface f = new WhileMatchRowFilter(
+ new InclusiveStopRowFilter(Bytes.toBytes("R:3")));
+ Scan scan = new Scan();
+ scan.setOldFilter(f);
+
+ rowInclusiveStopFilter(scan);
+ }
+
+ public void rowInclusiveStopFilter(Scan scan) throws IOException {
+ StoreScanner s = getTestScanner(scan, getCols("a"));
+
+ // read crap.
+ List<KeyValue> results = new ArrayList<KeyValue>();
+ assertTrue(s.next(results));
+ assertEquals(1, results.size());
+ assertEquals(stdKvs[0], results.get(0));
+ results.clear();
+
+ assertTrue(s.next(results));
+ assertEquals(1, results.size());
+ assertEquals(stdKvs[9], results.get(0));
+ results.clear();
+
+ assertTrue(s.next(results));
+ assertEquals(1, results.size());
+ assertEquals(stdKvs[12], results.get(0));
+ results.clear();
+
+ // without aggressive peeking, the scanner doesnt know if the next row is good or not
+ // under the affects of a filter.
+ assertFalse(s.next(results));
+ assertEquals(0, results.size());
+ }
+
+
+
}
|
9bac807cedbcff34e1a144fb475eff267e5ed86d
|
hadoop
|
MAPREDUCE-2187. Reporter sends progress during- sort/merge. Contributed by Anupam Seth.--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1152964 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/mapreduce/CHANGES.txt b/mapreduce/CHANGES.txt
index aee8b0a8cedc4..a95155a931d7b 100644
--- a/mapreduce/CHANGES.txt
+++ b/mapreduce/CHANGES.txt
@@ -40,6 +40,9 @@ Trunk (unreleased changes)
IMPROVEMENTS
+ MAPREDUCE-2187. Reporter sends progress during sort/merge. (Anupam Seth via
+ acmurthy)
+
MAPREDUCE-2365. Add counters to track bytes (read,written) via
File(Input,Output)Format. (Siddharth Seth via acmurthy)
diff --git a/mapreduce/src/java/mapred-default.xml b/mapreduce/src/java/mapred-default.xml
index 0b74e9778cb6a..db2d79a35dfd0 100644
--- a/mapreduce/src/java/mapred-default.xml
+++ b/mapreduce/src/java/mapred-default.xml
@@ -1041,6 +1041,14 @@
</property>
<!-- End of TaskTracker DistributedCache configuration -->
+<property>
+ <name>mapreduce.task.combine.progress.records</name>
+ <value>10000</value>
+ <description> The number of records to process during combine output collection
+ before sending a progress notification to the TaskTracker.
+ </description>
+</property>
+
<property>
<name>mapreduce.task.merge.progress.records</name>
<value>10000</value>
diff --git a/mapreduce/src/java/org/apache/hadoop/mapred/MapTask.java b/mapreduce/src/java/org/apache/hadoop/mapred/MapTask.java
index 44ba9a7e68a03..951b45ae70fa4 100644
--- a/mapreduce/src/java/org/apache/hadoop/mapred/MapTask.java
+++ b/mapreduce/src/java/org/apache/hadoop/mapred/MapTask.java
@@ -946,7 +946,7 @@ public MapOutputBuffer(TaskUmbilicalProtocol umbilical, JobConf job,
if (combinerRunner != null) {
final Counters.Counter combineOutputCounter =
reporter.getCounter(TaskCounter.COMBINE_OUTPUT_RECORDS);
- combineCollector= new CombineOutputCollector<K,V>(combineOutputCounter);
+ combineCollector= new CombineOutputCollector<K,V>(combineOutputCounter, reporter, conf);
} else {
combineCollector = null;
}
diff --git a/mapreduce/src/java/org/apache/hadoop/mapred/ReduceTask.java b/mapreduce/src/java/org/apache/hadoop/mapred/ReduceTask.java
index 0225982139b8b..6256c662730e8 100644
--- a/mapreduce/src/java/org/apache/hadoop/mapred/ReduceTask.java
+++ b/mapreduce/src/java/org/apache/hadoop/mapred/ReduceTask.java
@@ -352,7 +352,7 @@ public void run(JobConf job, final TaskUmbilicalProtocol umbilical)
Class combinerClass = conf.getCombinerClass();
CombineOutputCollector combineCollector =
(null != combinerClass) ?
- new CombineOutputCollector(reduceCombineOutputCounter) : null;
+ new CombineOutputCollector(reduceCombineOutputCounter, reporter, conf) : null;
Shuffle shuffle =
new Shuffle(getTaskID(), job, FileSystem.getLocal(job), umbilical,
diff --git a/mapreduce/src/java/org/apache/hadoop/mapred/Task.java b/mapreduce/src/java/org/apache/hadoop/mapred/Task.java
index f5abb3022a56f..8ad56a7d05137 100644
--- a/mapreduce/src/java/org/apache/hadoop/mapred/Task.java
+++ b/mapreduce/src/java/org/apache/hadoop/mapred/Task.java
@@ -58,6 +58,7 @@
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.lib.reduce.WrappedReducer;
import org.apache.hadoop.mapreduce.task.ReduceContextImpl;
import org.apache.hadoop.mapreduce.util.ResourceCalculatorPlugin;
@@ -79,6 +80,7 @@ abstract public class Task implements Writable, Configurable {
LogFactory.getLog(Task.class);
public static String MERGED_OUTPUT_PREFIX = ".merged";
+ public static final long DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS = 10000;
/**
* Counters to measure the usage of the different file systems.
@@ -1176,16 +1178,26 @@ public static class CombineOutputCollector<K extends Object, V extends Object>
implements OutputCollector<K, V> {
private Writer<K, V> writer;
private Counters.Counter outCounter;
- public CombineOutputCollector(Counters.Counter outCounter) {
+ private Progressable progressable;
+ private long progressBar;
+
+ public CombineOutputCollector(Counters.Counter outCounter, Progressable progressable, Configuration conf) {
this.outCounter = outCounter;
+ this.progressable=progressable;
+ progressBar = conf.getLong(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS);
}
+
public synchronized void setWriter(Writer<K, V> writer) {
this.writer = writer;
}
+
public synchronized void collect(K key, V value)
throws IOException {
outCounter.increment(1);
writer.append(key, value);
+ if ((outCounter.getValue() % progressBar) == 0) {
+ progressable.progress();
+ }
}
}
diff --git a/mapreduce/src/java/org/apache/hadoop/mapreduce/MRJobConfig.java b/mapreduce/src/java/org/apache/hadoop/mapreduce/MRJobConfig.java
index bcaeaf147af0e..0054646caf185 100644
--- a/mapreduce/src/java/org/apache/hadoop/mapreduce/MRJobConfig.java
+++ b/mapreduce/src/java/org/apache/hadoop/mapreduce/MRJobConfig.java
@@ -260,6 +260,8 @@ public interface MRJobConfig {
public static final String REDUCE_MEMTOMEM_ENABLED = "mapreduce.reduce.merge.memtomem.enabled";
+ public static final String COMBINE_RECORDS_BEFORE_PROGRESS = "mapreduce.task.combine.progress.records";
+
public static final String JOB_NAMENODES = "mapreduce.job.hdfs-servers";
public static final String JOB_JOBTRACKER_ID = "mapreduce.job.kerberos.jtprinicipal";
|
4fa8844a07917f925668409e461c2c48f9bbc965
|
camel
|
CAMEL-6053: Allow to override blueprint config- admin placeholders from unit test.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1443931 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/camel
|
diff --git a/components/camel-test-blueprint/src/main/java/org/apache/camel/test/blueprint/CamelBlueprintTestSupport.java b/components/camel-test-blueprint/src/main/java/org/apache/camel/test/blueprint/CamelBlueprintTestSupport.java
index 72db54659de43..d26d6992e739e 100644
--- a/components/camel-test-blueprint/src/main/java/org/apache/camel/test/blueprint/CamelBlueprintTestSupport.java
+++ b/components/camel-test-blueprint/src/main/java/org/apache/camel/test/blueprint/CamelBlueprintTestSupport.java
@@ -16,6 +16,7 @@
*/
package org.apache.camel.test.blueprint;
+import java.util.Dictionary;
import java.util.Properties;
import org.apache.camel.CamelContext;
@@ -26,6 +27,8 @@
import org.junit.Before;
import org.osgi.framework.BundleContext;
import org.osgi.service.blueprint.container.BlueprintContainer;
+import org.osgi.service.cm.Configuration;
+import org.osgi.service.cm.ConfigurationAdmin;
/**
* Base class for OSGi Blueprint unit tests with Camel.
@@ -47,6 +50,19 @@ public void setUp() throws Exception {
bundleContext.registerService(PropertiesComponent.OVERRIDE_PROPERTIES, extra, null);
}
+ // allow end users to override config admin service with extra properties
+ Dictionary props = new Properties();
+ String pid = useOverridePropertiesWithConfigAdmin(props);
+ if (pid != null) {
+ ConfigurationAdmin configAdmin = getOsgiService(ConfigurationAdmin.class);
+ Configuration config = configAdmin.getConfiguration(pid);
+ if (config == null) {
+ throw new IllegalArgumentException("Cannot find configuration with pid " + pid + " in OSGi ConfigurationAdmin service.");
+ }
+ log.info("Updating ConfigAdmin {} by overriding properties {}", config, props);
+ config.update(props);
+ }
+
super.setUp();
// must wait for blueprint container to be published then the namespace parser is complete and we are ready for testing
@@ -54,6 +70,16 @@ public void setUp() throws Exception {
getOsgiService(BlueprintContainer.class, "(osgi.blueprint.container.symbolicname=" + symbolicName + ")");
}
+ /**
+ * Override this method to override config admin properties.
+ *
+ * @param props properties where you add the properties to override
+ * @return the PID of the OSGi {@link ConfigurationAdmin} which are defined in the Blueprint XML file.
+ */
+ protected String useOverridePropertiesWithConfigAdmin(Dictionary props) {
+ return null;
+ }
+
@After
@Override
public void tearDown() throws Exception {
diff --git a/components/camel-test-blueprint/src/test/java/org/apache/camel/test/blueprint/ConfigAdminOverridePropertiesOutsideCamelContextTest.java b/components/camel-test-blueprint/src/test/java/org/apache/camel/test/blueprint/ConfigAdminOverridePropertiesOutsideCamelContextTest.java
new file mode 100644
index 0000000000000..776594730ecde
--- /dev/null
+++ b/components/camel-test-blueprint/src/test/java/org/apache/camel/test/blueprint/ConfigAdminOverridePropertiesOutsideCamelContextTest.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.test.blueprint;
+
+import java.util.Dictionary;
+
+import org.junit.Test;
+
+/**
+ *
+ */
+public class ConfigAdminOverridePropertiesOutsideCamelContextTest extends CamelBlueprintTestSupport {
+
+ @Override
+ protected String getBlueprintDescriptor() {
+ return "org/apache/camel/test/blueprint/configadmin-outside.xml";
+ }
+
+ // START SNIPPET: e1
+ @Override
+ protected String useOverridePropertiesWithConfigAdmin(Dictionary props) {
+ // add the properties we want to override
+ props.put("greeting", "Bye");
+
+ // return the PID of the config-admin we are using in the blueprint xml file
+ return "my-placeholders";
+ }
+ // END SNIPPET: e1
+
+ @Test
+ public void testConfigAdmin() throws Exception {
+ getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
+
+ template.sendBody("direct:start", "World");
+
+ assertMockEndpointsSatisfied();
+ }
+
+}
diff --git a/components/camel-test-blueprint/src/test/java/org/apache/camel/test/blueprint/MyCoolBean.java b/components/camel-test-blueprint/src/test/java/org/apache/camel/test/blueprint/MyCoolBean.java
new file mode 100644
index 0000000000000..0b370879ad0a0
--- /dev/null
+++ b/components/camel-test-blueprint/src/test/java/org/apache/camel/test/blueprint/MyCoolBean.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.test.blueprint;
+
+/**
+ *
+ */
+public class MyCoolBean {
+
+ private String say;
+
+ public String getSay() {
+ return say;
+ }
+
+ public void setSay(String say) {
+ this.say = say;
+ }
+
+ public String saySomething(String s) {
+ return say + " " + s;
+ }
+}
diff --git a/components/camel-test-blueprint/src/test/resources/org/apache/camel/test/blueprint/configadmin-outside.xml b/components/camel-test-blueprint/src/test/resources/org/apache/camel/test/blueprint/configadmin-outside.xml
new file mode 100644
index 0000000000000..bef05a066bdfc
--- /dev/null
+++ b/components/camel-test-blueprint/src/test/resources/org/apache/camel/test/blueprint/configadmin-outside.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<blueprint xmlns="http://www.osgi.org/xmlns/blueprint/v1.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:cm="http://aries.apache.org/blueprint/xmlns/blueprint-cm/v1.0.0"
+ xsi:schemaLocation="
+ http://aries.apache.org/blueprint/xmlns/blueprint-cm/v1.0.0 http://aries.apache.org/schemas/blueprint-cm/blueprint-cm-1.0.0.xsd
+ http://www.osgi.org/xmlns/blueprint/v1.0.0 http://www.osgi.org/xmlns/blueprint/v1.0.0/blueprint.xsd">
+
+ <!-- blueprint property placeholders -->
+ <cm:property-placeholder persistent-id="my-placeholders">
+ <cm:default-properties>
+ <cm:property name="greeting" value="Hello"/>
+ <cm:property name="destination" value="mock:result"/>
+ </cm:default-properties>
+ </cm:property-placeholder>
+
+ <!-- a bean that uses a blueprint property placeholder -->
+ <bean id="myCoolBean" class="org.apache.camel.test.blueprint.MyCoolBean">
+ <property name="say" value="${greeting}"/>
+ </bean>
+
+ <camelContext xmlns="http://camel.apache.org/schema/blueprint">
+
+ <route>
+ <from uri="direct:start"/>
+ <bean ref="myCoolBean" method="saySomething"/>
+ <to uri="{{destination}}"/>
+ </route>
+
+ </camelContext>
+
+</blueprint>
|
1ae65999cf19232ed85d3329fd4f4887c2f3fe47
|
intellij-community
|
refactor name: it could be any RC--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/xml/impl/src/com/intellij/ide/browsers/BrowserStarter.java b/xml/impl/src/com/intellij/ide/browsers/BrowserStarter.java
index 8e3f014ea5aff..ca6726926d6f8 100644
--- a/xml/impl/src/com/intellij/ide/browsers/BrowserStarter.java
+++ b/xml/impl/src/com/intellij/ide/browsers/BrowserStarter.java
@@ -21,12 +21,12 @@ public class BrowserStarter {
private static final Logger LOG = Logger.getInstance(BrowserStarter.class);
private final StartBrowserSettings mySettings;
- private final RunConfiguration myNodeRunConfiguration;
+ private final RunConfiguration myRunConfiguration;
private final ProcessHandler myServerProcessHandler;
public BrowserStarter(@NotNull RunConfiguration runConfiguration, @NotNull StartBrowserSettings settings, @NotNull ProcessHandler serverProcessHandler) {
mySettings = settings;
- myNodeRunConfiguration = runConfiguration;
+ myRunConfiguration = runConfiguration;
myServerProcessHandler = serverProcessHandler;
}
@@ -102,7 +102,7 @@ public void run() {
private void openPageNow() {
if (!isProcessTerminated()) {
- JavaScriptDebuggerStarter.Util.startDebugOrLaunchBrowser(myNodeRunConfiguration, mySettings);
+ JavaScriptDebuggerStarter.Util.startDebugOrLaunchBrowser(myRunConfiguration, mySettings);
}
}
|
57f4f664ba0bf785e9903535a4965d786cf13062
|
kotlin
|
refactored generation of static initializer--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/src/org/jetbrains/jet/codegen/ClassBodyCodegen.java b/idea/src/org/jetbrains/jet/codegen/ClassBodyCodegen.java
index ed0735204e8d6..58a6fa5d7d4c4 100644
--- a/idea/src/org/jetbrains/jet/codegen/ClassBodyCodegen.java
+++ b/idea/src/org/jetbrains/jet/codegen/ClassBodyCodegen.java
@@ -4,8 +4,11 @@
import org.jetbrains.jet.lang.descriptors.PropertyDescriptor;
import org.jetbrains.jet.lang.psi.*;
import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
+import org.objectweb.asm.commons.InstructionAdapter;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@@ -22,6 +25,8 @@ public abstract class ClassBodyCodegen {
protected final ClassVisitor v;
protected final ClassContext context;
+ protected final List<CodeChunk> staticInitializerChunks = new ArrayList<CodeChunk>();
+
public ClassBodyCodegen(JetClassOrObject aClass, ClassContext context, ClassVisitor v, GenerationState state) {
this.state = state;
descriptor = state.getBindingContext().getClassDescriptor(aClass);
@@ -38,6 +43,8 @@ public void generate() {
generateClassBody();
+ generateStaticInitializer();
+
v.visitEnd();
}
@@ -95,4 +102,23 @@ protected List<JetParameter> getPrimaryConstructorParameters() {
}
return Collections.emptyList();
}
+
+ private void generateStaticInitializer() {
+ if (staticInitializerChunks.size() > 0) {
+ final MethodVisitor mv = v.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC,
+ "<clinit>", "()V", null, null);
+ mv.visitCode();
+
+ InstructionAdapter v = new InstructionAdapter(mv);
+
+ for (CodeChunk chunk : staticInitializerChunks) {
+ chunk.generate(v);
+ }
+
+ mv.visitInsn(Opcodes.RETURN);
+ mv.visitMaxs(0, 0);
+
+ mv.visitEnd();
+ }
+ }
}
diff --git a/idea/src/org/jetbrains/jet/codegen/CodeChunk.java b/idea/src/org/jetbrains/jet/codegen/CodeChunk.java
new file mode 100644
index 0000000000000..eb0aa42088415
--- /dev/null
+++ b/idea/src/org/jetbrains/jet/codegen/CodeChunk.java
@@ -0,0 +1,10 @@
+package org.jetbrains.jet.codegen;
+
+import org.objectweb.asm.commons.InstructionAdapter;
+
+/**
+ * @author yole
+ */
+public interface CodeChunk {
+ void generate(InstructionAdapter v);
+}
diff --git a/idea/src/org/jetbrains/jet/codegen/ImplementationBodyCodegen.java b/idea/src/org/jetbrains/jet/codegen/ImplementationBodyCodegen.java
index f013f20e2d91a..1287090056967 100644
--- a/idea/src/org/jetbrains/jet/codegen/ImplementationBodyCodegen.java
+++ b/idea/src/org/jetbrains/jet/codegen/ImplementationBodyCodegen.java
@@ -74,29 +74,74 @@ protected String getSuperClass() {
@Override
protected void generateSyntheticParts() {
- int typeinfoStatic = descriptor.getTypeConstructor().getParameters().size() > 0 ? 0 : Opcodes.ACC_STATIC;
- v.visitField(Opcodes.ACC_PRIVATE | typeinfoStatic, "$typeInfo", "Ljet/typeinfo/TypeInfo;", null, null);
+ generateFieldForTypeInfo();
+ generateFieldForObjectInstance();
+ generateFieldForClassObject();
+ try {
+ generatePrimaryConstructor();
+ }
+ catch(RuntimeException e) {
+ throw new RuntimeException("Error generating primary constructor of class " + myClass.getName() + " with kind " + kind, e);
+ }
+
+ generateGetTypeInfo();
+ }
+
+ private void generateFieldForTypeInfo() {
+ final boolean typeInfoIsStatic = descriptor.getTypeConstructor().getParameters().size() == 0;
+ v.visitField(Opcodes.ACC_PRIVATE | (typeInfoIsStatic ? Opcodes.ACC_STATIC : 0), "$typeInfo",
+ "Ljet/typeinfo/TypeInfo;", null, null);
+ if (typeInfoIsStatic) {
+ staticInitializerChunks.add(new CodeChunk() {
+ @Override
+ public void generate(InstructionAdapter v) {
+ JetTypeMapper typeMapper = state.getTypeMapper();
+ ClassCodegen.newTypeInfo(v, false, typeMapper.jvmType(descriptor, OwnerKind.INTERFACE));
+ v.putstatic(typeMapper.jvmName(descriptor, kind), "$typeInfo", "Ljet/typeinfo/TypeInfo;");
+ }
+ });
+ }
+ }
+
+ private void generateFieldForObjectInstance() {
if (isNonLiteralObject()) {
Type type = JetTypeMapper.jetImplementationType(descriptor);
v.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "$instance", type.getDescriptor(), null, null);
+
+ staticInitializerChunks.add(new CodeChunk() {
+ @Override
+ public void generate(InstructionAdapter v) {
+ String name = jvmName();
+ v.anew(Type.getObjectType(name));
+ v.dup();
+ v.invokespecial(name, "<init>", "()V");
+ v.putstatic(name, "$instance", JetTypeMapper.jetImplementationType(descriptor).getDescriptor());
+ }
+ });
+
}
+ }
+
+ private void generateFieldForClassObject() {
final JetClassObject classObject = getClassObject();
if (classObject != null) {
Type type = Type.getObjectType(state.getTypeMapper().jvmName(classObject));
v.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "$classobj", type.getDescriptor(), null, null);
- }
-
- generateStaticInitializer();
- try {
- generatePrimaryConstructor();
- }
- catch(RuntimeException e) {
- throw new RuntimeException("Error generating primary constructor of class " + myClass.getName() + " with kind " + kind, e);
+ staticInitializerChunks.add(new CodeChunk() {
+ @Override
+ public void generate(InstructionAdapter v) {
+ String name = state.getTypeMapper().jvmName(classObject);
+ final Type classObjectType = Type.getObjectType(name);
+ v.anew(classObjectType);
+ v.dup();
+ v.invokespecial(name, "<init>", "()V");
+ v.putstatic(state.getTypeMapper().jvmName(descriptor, OwnerKind.IMPLEMENTATION), "$classobj",
+ classObjectType.getDescriptor());
+ }
+ });
}
-
- generateGetTypeInfo();
}
protected void generatePrimaryConstructor() {
@@ -409,47 +454,6 @@ else if (declaration instanceof JetFunction) {
}
}
- private void generateStaticInitializer() {
- boolean needTypeInfo = descriptor.getTypeConstructor().getParameters().size() == 0;
- boolean needInstance = isNonLiteralObject();
- JetClassObject classObject = getClassObject();
- if (!needTypeInfo && !needInstance && classObject == null) {
- return;
- }
- final MethodVisitor mv = v.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC,
- "<clinit>", "()V", null, null);
- mv.visitCode();
-
- InstructionAdapter v = new InstructionAdapter(mv);
-
- if (needTypeInfo) {
- JetTypeMapper typeMapper = state.getTypeMapper();
- ClassCodegen.newTypeInfo(v, false, typeMapper.jvmType(descriptor, OwnerKind.INTERFACE));
- v.putstatic(typeMapper.jvmName(descriptor, kind), "$typeInfo", "Ljet/typeinfo/TypeInfo;");
- }
- if (needInstance) {
- String name = jvmName();
- v.anew(Type.getObjectType(name));
- v.dup();
- v.invokespecial(name, "<init>", "()V");
- v.putstatic(name, "$instance", JetTypeMapper.jetImplementationType(descriptor).getDescriptor());
- }
- if (classObject != null) {
- String name = state.getTypeMapper().jvmName(classObject);
- final Type classObjectType = Type.getObjectType(name);
- v.anew(classObjectType);
- v.dup();
- v.invokespecial(name, "<init>", "()V");
- v.putstatic(state.getTypeMapper().jvmName(descriptor, OwnerKind.IMPLEMENTATION), "$classobj",
- classObjectType.getDescriptor());
- }
-
- mv.visitInsn(Opcodes.RETURN);
- mv.visitMaxs(0, 0);
-
- mv.visitEnd();
- }
-
@Nullable
private JetClassObject getClassObject() {
return myClass instanceof JetClass ? ((JetClass) myClass).getClassObject() : null;
|
bc6ad67d673dfdebd216b021193f736dcf5a76f8
|
hbase
|
HBASE-1386 NPE in housekeeping--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@772703 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 537c12dea555..254a4e31494d 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -115,6 +115,7 @@ Release 0.20.0 - Unreleased
HBASE-1377 RS address is null in master web UI
HBASE-1344 WARN IllegalStateException: Cannot set a region as open if it has
not been pending
+ HBASE-1386 NPE in housekeeping
IMPROVEMENTS
HBASE-1089 Add count of regions on filesystem to master UI; add percentage
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 656c395d81ae..9f6b1db91e1b 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -1124,6 +1124,7 @@ private boolean isHealthy() {
}
return true;
}
+
/*
* Run some housekeeping tasks before we go into 'hibernation' sleeping at
* the end of the main HRegionServer run loop.
@@ -1132,12 +1133,16 @@ private void housekeeping() {
// If the todo list has > 0 messages, iterate looking for open region
// messages. Send the master a message that we're working on its
// processing so it doesn't assign the region elsewhere.
- if (this.toDo.size() <= 0) {
+ if (this.toDo.isEmpty()) {
return;
}
// This iterator is 'safe'. We are guaranteed a view on state of the
// queue at time iterator was taken out. Apparently goes from oldest.
for (ToDoEntry e: this.toDo) {
+ HMsg msg = e.msg;
+ if (msg == null) {
+ LOG.warn("Message is empty: " + e);
+ }
if (e.msg.isType(HMsg.Type.MSG_REGION_OPEN)) {
addProcessingMessage(e.msg.getRegionInfo());
}
@@ -1299,15 +1304,16 @@ void reportSplit(HRegionInfo oldRegion, HRegionInfo newRegionA,
/*
* Data structure to hold a HMsg and retries count.
*/
- private static class ToDoEntry {
- protected int tries;
+ private static final class ToDoEntry {
+ protected volatile int tries;
protected final HMsg msg;
- ToDoEntry(HMsg msg) {
+
+ ToDoEntry(final HMsg msg) {
this.tries = 0;
this.msg = msg;
}
}
-
+
final BlockingQueue<ToDoEntry> toDo = new LinkedBlockingQueue<ToDoEntry>();
private Worker worker;
private Thread workerThread;
|
a6fa02a07fe374204e9e02914ccf1cc9812aa5ba
|
restlet-framework-java
|
- Initial code for new default HTTP connector and- SIP connector.--
|
a
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet/src/org/restlet/engine/http/connector/Connection.java b/modules/org.restlet/src/org/restlet/engine/http/connector/Connection.java
index 8631b810ca..a0b89ae9c7 100644
--- a/modules/org.restlet/src/org/restlet/engine/http/connector/Connection.java
+++ b/modules/org.restlet/src/org/restlet/engine/http/connector/Connection.java
@@ -1037,10 +1037,12 @@ public void writeMessages() {
}
}
- writeMessage(message);
+ if (message != null) {
+ writeMessage(message);
- if (getState() == ConnectionState.CLOSING) {
- close(true);
+ if (getState() == ConnectionState.CLOSING) {
+ close(true);
+ }
}
}
} catch (Exception e) {
|
6c0386029b4620e622f6d62939567f88238a21a2
|
camel
|
CAMEL-2011: JmsEndpoint is now singleton.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@814584 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java
index 78ad2ac9dfc3b..8c6a049c9f13e 100644
--- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java
+++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java
@@ -305,8 +305,9 @@ public void setSelector(String selector) {
this.selector = selector;
}
+ @ManagedAttribute
public boolean isSingleton() {
- return false;
+ return true;
}
public synchronized Requestor getRequestor() throws Exception {
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsSendToAlotOfDestinationWithSameEndpointTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsSendToAlotOfDestinationWithSameEndpointTest.java
index 6beb3397d1d43..c6c84ae801834 100644
--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsSendToAlotOfDestinationWithSameEndpointTest.java
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsSendToAlotOfDestinationWithSameEndpointTest.java
@@ -47,7 +47,6 @@ public void testSendToAlotOfMessageToQueues() throws Exception {
// use the same endpoint but provide a header with the dynamic queue we send to
// this allows us to reuse endpoints and not create a new endpoint for each and every jms queue
// we send to
- Thread.sleep(50);
if (i > 0 && i % 50 == 0) {
LOG.info("Send " + i + " messages so far");
}
|
b8291d673e065fdc24d82aca9e1e4e110e8d81c2
|
hbase
|
HADOOP-2295 Fix assigning a region to multiple- servers--git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@599578 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 3c81729b59af..476a99522c1f 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -39,6 +39,7 @@ Trunk (unreleased changes)
may not restart)
HADOOP-2253 getRow can return HBASE::DELETEVAL cells
(Bryan Duxbury via Stack)
+ HADOOP-2295 Fix assigning a region to multiple servers
IMPROVEMENTS
HADOOP-2401 Add convenience put method that takes writable
diff --git a/src/java/org/apache/hadoop/hbase/HMaster.java b/src/java/org/apache/hadoop/hbase/HMaster.java
index d2e930d90b72..d5424d36a54b 100644
--- a/src/java/org/apache/hadoop/hbase/HMaster.java
+++ b/src/java/org/apache/hadoop/hbase/HMaster.java
@@ -1136,12 +1136,16 @@ public void run() {
// Join up with all threads
try {
- rootScannerThread.join(); // Wait for the root scanner to finish.
+ if (rootScannerThread.isAlive()) {
+ rootScannerThread.join(); // Wait for the root scanner to finish.
+ }
} catch (Exception iex) {
LOG.warn("root scanner", iex);
}
try {
- metaScannerThread.join(); // Wait for meta scanner to finish.
+ if (metaScannerThread.isAlive()) {
+ metaScannerThread.join(); // Wait for meta scanner to finish.
+ }
} catch(Exception iex) {
LOG.warn("meta scanner", iex);
}
@@ -1460,10 +1464,25 @@ private HMsg[] processMsgs(HServerInfo info, HMsg incomingMsgs[])
// Get reports on what the RegionServer did.
for (int i = 0; i < incomingMsgs.length; i++) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Received " + incomingMsgs[i].toString() + "from " +
+ serverName);
+ }
HRegionInfo region = incomingMsgs[i].getRegionInfo();
switch (incomingMsgs[i].getMsg()) {
+ case HMsg.MSG_REPORT_PROCESS_OPEN:
+ synchronized (this.assignAttempts) {
+ // Region server has acknowledged request to open region.
+ // Extend region open time by 1/2 max region open time.
+ assignAttempts.put(region.getRegionName(),
+ Long.valueOf(assignAttempts.get(
+ region.getRegionName()).longValue() +
+ (this.maxRegionOpenTime / 2)));
+ }
+ break;
+
case HMsg.MSG_REPORT_OPEN:
HRegionInfo regionInfo = unassignedRegions.get(region.getRegionName());
@@ -1484,9 +1503,7 @@ private HMsg[] processMsgs(HServerInfo info, HMsg incomingMsgs[])
} else {
LOG.info(info.getServerAddress().toString() + " serving " +
region.getRegionName());
- // Remove from unassigned list so we don't assign it to someone else
- this.unassignedRegions.remove(region.getRegionName());
- this.assignAttempts.remove(region.getRegionName());
+
if (region.getRegionName().compareTo(
HRegionInfo.rootRegionInfo.getRegionName()) == 0) {
// Store the Root Region location (in memory)
@@ -1495,21 +1512,23 @@ private HMsg[] processMsgs(HServerInfo info, HMsg incomingMsgs[])
new HServerAddress(info.getServerAddress()));
this.rootRegionLocation.notifyAll();
}
- break;
- }
-
- // Note that the table has been assigned and is waiting for the meta
- // table to be updated.
+ } else {
+ // Note that the table has been assigned and is waiting for the meta
+ // table to be updated.
- pendingRegions.add(region.getRegionName());
+ pendingRegions.add(region.getRegionName());
- // Queue up an update to note the region location.
+ // Queue up an update to note the region location.
- try {
- msgQueue.put(new ProcessRegionOpen(info, region));
- } catch (InterruptedException e) {
- throw new RuntimeException("Putting into msgQueue was interrupted.", e);
- }
+ try {
+ msgQueue.put(new ProcessRegionOpen(info, region));
+ } catch (InterruptedException e) {
+ throw new RuntimeException("Putting into msgQueue was interrupted.", e);
+ }
+ }
+ // Remove from unassigned list so we don't assign it to someone else
+ this.unassignedRegions.remove(region.getRegionName());
+ this.assignAttempts.remove(region.getRegionName());
}
break;
diff --git a/src/java/org/apache/hadoop/hbase/HMsg.java b/src/java/org/apache/hadoop/hbase/HMsg.java
index 21e118f1f7bb..488ff8f5ef9d 100644
--- a/src/java/org/apache/hadoop/hbase/HMsg.java
+++ b/src/java/org/apache/hadoop/hbase/HMsg.java
@@ -53,6 +53,9 @@ public class HMsg implements Writable {
/** region server is no longer serving the specified region */
public static final byte MSG_REPORT_CLOSE = 101;
+
+ /** region server is processing open request */
+ public static final byte MSG_REPORT_PROCESS_OPEN = 102;
/**
* region server split the region associated with this message.
@@ -142,6 +145,10 @@ public String toString() {
message.append("MSG_REGION_CLOSE_WITHOUT_REPORT : ");
break;
+ case MSG_REPORT_PROCESS_OPEN:
+ message.append("MSG_REPORT_PROCESS_OPEN : ");
+ break;
+
case MSG_REPORT_OPEN:
message.append("MSG_REPORT_OPEN : ");
break;
diff --git a/src/java/org/apache/hadoop/hbase/HRegionServer.java b/src/java/org/apache/hadoop/hbase/HRegionServer.java
index db11f31bc44e..74c3c7e21760 100644
--- a/src/java/org/apache/hadoop/hbase/HRegionServer.java
+++ b/src/java/org/apache/hadoop/hbase/HRegionServer.java
@@ -742,6 +742,10 @@ public void run() {
throw new RuntimeException("Putting into msgQueue was " +
"interrupted.", e);
}
+ if (msgs[i].getMsg() == HMsg.MSG_REGION_OPEN) {
+ outboundMsgs.add(new HMsg(HMsg.MSG_REPORT_PROCESS_OPEN,
+ msgs[i].getRegionInfo()));
+ }
}
}
}
@@ -982,11 +986,11 @@ synchronized void abort() {
* Presumption is that all closes and stops have already been called.
*/
void join() {
- join(this.workerThread);
join(this.logRoller);
join(this.cacheFlusher);
join(this.compactor);
join(this.splitter);
+ join(this.workerThread);
}
private void join(final Thread t) {
@@ -1161,8 +1165,8 @@ void openRegion(final HRegionInfo regionInfo) throws IOException {
} finally {
this.lock.writeLock().unlock();
}
+ reportOpen(region);
}
- reportOpen(region);
}
void closeRegion(final HRegionInfo hri, final boolean reportWhenCompleted)
diff --git a/src/java/org/apache/hadoop/hbase/Leases.java b/src/java/org/apache/hadoop/hbase/Leases.java
index c3219d4ce757..57d28b2fac9c 100644
--- a/src/java/org/apache/hadoop/hbase/Leases.java
+++ b/src/java/org/apache/hadoop/hbase/Leases.java
@@ -108,11 +108,13 @@ public void closeAfterLeasesExpire() {
public void close() {
LOG.info(Thread.currentThread().getName() + " closing leases");
this.stop.set(true);
- try {
- this.leaseMonitorThread.interrupt();
- this.leaseMonitorThread.join();
- } catch (InterruptedException iex) {
- // Ignore
+ while (this.leaseMonitorThread.isAlive()) {
+ try {
+ this.leaseMonitorThread.interrupt();
+ this.leaseMonitorThread.join();
+ } catch (InterruptedException iex) {
+ // Ignore
+ }
}
synchronized(leases) {
synchronized(sortedLeases) {
@@ -211,10 +213,16 @@ public void cancelLease(final long holderId, final long resourceId) {
* Its a daemon thread.
*/
class LeaseMonitor extends Chore {
+ /**
+ * @param p
+ * @param s
+ */
public LeaseMonitor(int p, AtomicBoolean s) {
super(p, s);
}
+ /** {@inheritDoc} */
+ @Override
protected void chore() {
synchronized(leases) {
synchronized(sortedLeases) {
diff --git a/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java b/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
index 8a2c50f68c54..151f54200625 100644
--- a/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
+++ b/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
@@ -53,7 +53,9 @@ public class LocalHBaseCluster implements HConstants {
private final HMaster master;
private final List<RegionServerThread> regionThreads;
private final static int DEFAULT_NO = 1;
+ /** local mode */
public static final String LOCAL = "local";
+ /** 'local:' */
public static final String LOCAL_COLON = LOCAL + ":";
private final HBaseConfiguration conf;
@@ -146,12 +148,14 @@ public List<RegionServerThread> getRegionServers() {
public String waitOnRegionServer(int serverNumber) {
RegionServerThread regionServerThread =
this.regionThreads.remove(serverNumber);
- try {
- LOG.info("Waiting on " +
- regionServerThread.getRegionServer().serverInfo.toString());
- regionServerThread.join();
- } catch (InterruptedException e) {
- e.printStackTrace();
+ while (regionServerThread.isAlive()) {
+ try {
+ LOG.info("Waiting on " +
+ regionServerThread.getRegionServer().serverInfo.toString());
+ regionServerThread.join();
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
}
return regionServerThread.getName();
}
@@ -217,10 +221,12 @@ public void shutdown() {
}
}
if (this.master != null) {
- try {
- this.master.join();
- } catch(InterruptedException e) {
- // continue
+ while (this.master.isAlive()) {
+ try {
+ this.master.join();
+ } catch(InterruptedException e) {
+ // continue
+ }
}
}
LOG.info("Shutdown " +
diff --git a/src/test/org/apache/hadoop/hbase/DFSAbort.java b/src/test/org/apache/hadoop/hbase/DFSAbort.java
index a9c553e31323..4a30a75ac40b 100644
--- a/src/test/org/apache/hadoop/hbase/DFSAbort.java
+++ b/src/test/org/apache/hadoop/hbase/DFSAbort.java
@@ -22,19 +22,10 @@
import junit.framework.TestSuite;
import junit.textui.TestRunner;
-import java.io.PrintWriter;
-import org.apache.hadoop.util.ReflectionUtils;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
/**
* Test ability of HBase to handle DFS failure
*/
public class DFSAbort extends HBaseClusterTestCase {
- private static final Log LOG =
- LogFactory.getLog(DFSAbort.class.getName());
-
/** constructor */
public DFSAbort() {
super();
@@ -66,8 +57,6 @@ public void testDFSAbort() throws Exception {
// By now the Mini DFS is running, Mini HBase is running and we have
// created a table. Now let's yank the rug out from HBase
cluster.getDFSCluster().shutdown();
- // Now wait for Mini HBase Cluster to shut down
-// cluster.join();
threadDumpingJoin();
} catch (Exception e) {
e.printStackTrace();
diff --git a/src/test/org/apache/hadoop/hbase/TestLogRolling.java b/src/test/org/apache/hadoop/hbase/TestLogRolling.java
index ce7dd68dbc83..e382cdbf32e6 100644
--- a/src/test/org/apache/hadoop/hbase/TestLogRolling.java
+++ b/src/test/org/apache/hadoop/hbase/TestLogRolling.java
@@ -127,33 +127,41 @@ private void startAndWriteData() throws Exception {
this.server = cluster.getRegionThreads().get(0).getRegionServer();
this.log = server.getLog();
-
+
// When the META table can be opened, the region servers are running
- @SuppressWarnings("unused")
HTable meta = new HTable(conf, HConstants.META_TABLE_NAME);
- // Create the test table and open it
- HTableDescriptor desc = new HTableDescriptor(tableName);
- desc.addFamily(new HColumnDescriptor(HConstants.COLUMN_FAMILY.toString()));
- HBaseAdmin admin = new HBaseAdmin(conf);
- admin.createTable(desc);
- HTable table = new HTable(conf, new Text(tableName));
-
- for (int i = 1; i <= 2048; i++) { // 2048 writes should cause 8 log rolls
- long lockid =
- table.startUpdate(new Text("row" + String.format("%1$04d", i)));
- table.put(lockid, HConstants.COLUMN_FAMILY, value);
- table.commit(lockid);
-
- if (i % 256 == 0) {
- // After every 256 writes sleep to let the log roller run
-
- try {
- Thread.sleep(2000);
- } catch (InterruptedException e) {
- // continue
+ try {
+
+ // Create the test table and open it
+ HTableDescriptor desc = new HTableDescriptor(tableName);
+ desc.addFamily(new HColumnDescriptor(HConstants.COLUMN_FAMILY.toString()));
+ HBaseAdmin admin = new HBaseAdmin(conf);
+ admin.createTable(desc);
+ HTable table = new HTable(conf, new Text(tableName));
+
+ try {
+ for (int i = 1; i <= 2048; i++) { // 2048 writes should cause 8 log rolls
+ long lockid =
+ table.startUpdate(new Text("row" + String.format("%1$04d", i)));
+ table.put(lockid, HConstants.COLUMN_FAMILY, value);
+ table.commit(lockid);
+
+ if (i % 256 == 0) {
+ // After every 256 writes sleep to let the log roller run
+
+ try {
+ Thread.sleep(2000);
+ } catch (InterruptedException e) {
+ // continue
+ }
+ }
}
+ } finally {
+ table.close();
}
+ } finally {
+ meta.close();
}
}
|
b6ce0a1f69af1862f1577f5c2a0f96905d2af683
|
hadoop
|
YARN-2635. TestRM, TestRMRestart,- TestClientToAMTokens should run with both CS and FS. (Wei Yan and kasha via- kasha)--(cherry picked from commit 80d11eb68e60f88e16d7d41edecbddfc935a6b10)-
|
p
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 628dfa2e91167..9ce5d8dbb69dd 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -14,6 +14,9 @@ Release 2.7.0 - UNRELEASED
YARN-1979. TestDirectoryCollection fails when the umask is unusual.
(Vinod Kumar Vavilapalli and Tsuyoshi OZAWA via junping_du)
+ YARN-2635. TestRM, TestRMRestart, TestClientToAMTokens should run
+ with both CS and FS. (Wei Yan and kasha via kasha)
+
OPTIMIZATIONS
BUG FIXES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/ParameterizedSchedulerTestBase.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/ParameterizedSchedulerTestBase.java
new file mode 100644
index 0000000000000..cfd16001a37b6
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/ParameterizedSchedulerTestBase.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.resourcemanager;
+
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairSchedulerConfiguration;
+
+
+import org.junit.Before;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.Arrays;
+import java.util.Collection;
+
+@RunWith(Parameterized.class)
+public abstract class ParameterizedSchedulerTestBase {
+ protected final static String TEST_DIR =
+ new File(System.getProperty("test.build.data", "/tmp")).getAbsolutePath();
+ private final static String FS_ALLOC_FILE =
+ new File(TEST_DIR, "test-fs-queues.xml").getAbsolutePath();
+
+ private SchedulerType schedulerType;
+ private YarnConfiguration conf = null;
+
+ public enum SchedulerType {
+ CAPACITY, FAIR
+ }
+
+ public ParameterizedSchedulerTestBase(SchedulerType type) {
+ schedulerType = type;
+ }
+
+ public YarnConfiguration getConf() {
+ return conf;
+ }
+
+ @Parameterized.Parameters
+ public static Collection<SchedulerType[]> getParameters() {
+ return Arrays.asList(new SchedulerType[][]{
+ {SchedulerType.CAPACITY}, {SchedulerType.FAIR}});
+ }
+
+ @Before
+ public void configureScheduler() throws IOException {
+ conf = new YarnConfiguration();
+ switch (schedulerType) {
+ case CAPACITY:
+ conf.set(YarnConfiguration.RM_SCHEDULER,
+ CapacityScheduler.class.getName());
+ break;
+ case FAIR:
+ configureFairScheduler(conf);
+ break;
+ }
+ }
+
+ private void configureFairScheduler(YarnConfiguration conf) throws IOException {
+ // Disable queueMaxAMShare limitation for fair scheduler
+ PrintWriter out = new PrintWriter(new FileWriter(FS_ALLOC_FILE));
+ out.println("<?xml version=\"1.0\"?>");
+ out.println("<allocations>");
+ out.println("<queueMaxAMShareDefault>-1.0</queueMaxAMShareDefault>");
+ out.println("</allocations>");
+ out.close();
+
+ conf.set(YarnConfiguration.RM_SCHEDULER, FairScheduler.class.getName());
+ conf.set(FairSchedulerConfiguration.ALLOCATION_FILE, FS_ALLOC_FILE);
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRM.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRM.java
index cd67ebc216bec..3d664f28848bf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRM.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRM.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.resourcemanager;
+import org.junit.Before;
import static org.mockito.Matchers.argThat;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.spy;
@@ -65,7 +66,6 @@
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM;
import org.apache.log4j.Level;
@@ -75,13 +75,23 @@
import org.mockito.ArgumentMatcher;
@SuppressWarnings({"unchecked", "rawtypes"})
-public class TestRM {
-
+public class TestRM extends ParameterizedSchedulerTestBase {
private static final Log LOG = LogFactory.getLog(TestRM.class);
// Milliseconds to sleep for when waiting for something to happen
private final static int WAIT_SLEEP_MS = 100;
+ private YarnConfiguration conf;
+
+ public TestRM(SchedulerType type) {
+ super(type);
+ }
+
+ @Before
+ public void setup() {
+ conf = getConf();
+ }
+
@After
public void tearDown() {
ClusterMetrics.destroy();
@@ -93,7 +103,7 @@ public void tearDown() {
public void testGetNewAppId() throws Exception {
Logger rootLogger = LogManager.getRootLogger();
rootLogger.setLevel(Level.DEBUG);
- MockRM rm = new MockRM();
+ MockRM rm = new MockRM(conf);
rm.start();
GetNewApplicationResponse resp = rm.getNewAppId();
@@ -106,7 +116,7 @@ public void testGetNewAppId() throws Exception {
public void testAppWithNoContainers() throws Exception {
Logger rootLogger = LogManager.getRootLogger();
rootLogger.setLevel(Level.DEBUG);
- MockRM rm = new MockRM();
+ MockRM rm = new MockRM(conf);
rm.start();
MockNM nm1 = rm.registerNode("h1:1234", 5120);
@@ -128,7 +138,6 @@ public void testAppWithNoContainers() throws Exception {
public void testAppOnMultiNode() throws Exception {
Logger rootLogger = LogManager.getRootLogger();
rootLogger.setLevel(Level.DEBUG);
- YarnConfiguration conf = new YarnConfiguration();
conf.set("yarn.scheduler.capacity.node-locality-delay", "-1");
MockRM rm = new MockRM(conf);
rm.start();
@@ -188,7 +197,6 @@ public void testAppOnMultiNode() throws Exception {
// corresponding NM Token.
@Test (timeout = 20000)
public void testNMTokenSentForNormalContainer() throws Exception {
- YarnConfiguration conf = new YarnConfiguration();
conf.set(YarnConfiguration.RM_SCHEDULER,
CapacityScheduler.class.getCanonicalName());
MockRM rm = new MockRM(conf);
@@ -240,7 +248,7 @@ public void testNMTokenSentForNormalContainer() throws Exception {
@Test (timeout = 40000)
public void testNMToken() throws Exception {
- MockRM rm = new MockRM();
+ MockRM rm = new MockRM(conf);
try {
rm.start();
MockNM nm1 = rm.registerNode("h1:1234", 10000);
@@ -422,8 +430,6 @@ protected void allocateContainersAndValidateNMTokens(MockAM am,
@Test (timeout = 300000)
public void testActivatingApplicationAfterAddingNM() throws Exception {
- YarnConfiguration conf = new YarnConfiguration();
-
MockRM rm1 = new MockRM(conf);
// start like normal because state is empty
@@ -469,7 +475,6 @@ public void testActivatingApplicationAfterAddingNM() throws Exception {
// is killed or failed, so that client doesn't get the wrong information.
@Test (timeout = 80000)
public void testInvalidateAMHostPortWhenAMFailedOrKilled() throws Exception {
- YarnConfiguration conf = new YarnConfiguration();
conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 1);
MockRM rm1 = new MockRM(conf);
rm1.start();
@@ -522,7 +527,6 @@ public void testInvalidateAMHostPortWhenAMFailedOrKilled() throws Exception {
@Test (timeout = 60000)
public void testInvalidatedAMHostPortOnAMRestart() throws Exception {
- YarnConfiguration conf = new YarnConfiguration();
MockRM rm1 = new MockRM(conf);
rm1.start();
MockNM nm1 =
@@ -555,7 +559,6 @@ public void testInvalidatedAMHostPortOnAMRestart() throws Exception {
@Test (timeout = 60000)
public void testApplicationKillAtAcceptedState() throws Exception {
- YarnConfiguration conf = new YarnConfiguration();
final Dispatcher dispatcher = new AsyncDispatcher() {
@Override
public EventHandler getEventHandler() {
@@ -632,15 +635,4 @@ protected Dispatcher createDispatcher() {
Assert.assertEquals(appsSubmitted + 1, metrics.getAppsSubmitted());
}
- public static void main(String[] args) throws Exception {
- TestRM t = new TestRM();
- t.testGetNewAppId();
- t.testAppWithNoContainers();
- t.testAppOnMultiNode();
- t.testNMToken();
- t.testActivatingApplicationAfterAddingNM();
- t.testInvalidateAMHostPortWhenAMFailedOrKilled();
- t.testInvalidatedAMHostPortOnAMRestart();
- t.testApplicationKillAtAcceptedState();
- }
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java
index 0b3a364c45564..b37b648ae8b8a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java
@@ -29,7 +29,6 @@
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
@@ -109,7 +108,7 @@
import org.junit.Before;
import org.junit.Test;
-public class TestRMRestart {
+public class TestRMRestart extends ParameterizedSchedulerTestBase {
private final static File TEMP_DIR = new File(System.getProperty(
"test.build.data", "/tmp"), "decommision");
private File hostFile = new File(TEMP_DIR + File.separator + "hostFile.txt");
@@ -117,12 +116,17 @@ public class TestRMRestart {
// Fake rmAddr for token-renewal
private static InetSocketAddress rmAddr;
+ private List<MockRM> rms = new ArrayList<MockRM>();
+
+ public TestRMRestart(SchedulerType type) {
+ super(type);
+ }
@Before
- public void setup() throws UnknownHostException {
+ public void setup() throws IOException {
+ conf = getConf();
Logger rootLogger = LogManager.getRootLogger();
rootLogger.setLevel(Level.DEBUG);
- conf = new YarnConfiguration();
UserGroupInformation.setConfiguration(conf);
conf.set(YarnConfiguration.RECOVERY_ENABLED, "true");
conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName());
@@ -132,9 +136,24 @@ public void setup() throws UnknownHostException {
@After
public void tearDown() {
+ for (MockRM rm : rms) {
+ rm.stop();
+ }
+ rms.clear();
+
TEMP_DIR.delete();
}
+ /**
+ *
+ * @return a new MockRM that will be stopped at the end of the test.
+ */
+ private MockRM createMockRM(YarnConfiguration conf, RMStateStore store) {
+ MockRM rm = new MockRM(conf, store);
+ rms.add(rm);
+ return rm;
+ }
+
@SuppressWarnings("rawtypes")
@Test (timeout=180000)
public void testRMRestart() throws Exception {
@@ -151,7 +170,7 @@ public void testRMRestart() throws Exception {
// PHASE 1: create state in an RM
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
// start like normal because state is empty
rm1.start();
@@ -247,7 +266,7 @@ public void testRMRestart() throws Exception {
// PHASE 2: create new RM and start from old state
// create new RM to represent restart and recover state
- MockRM rm2 = new MockRM(conf, memStore);
+ MockRM rm2 = createMockRM(conf, memStore);
// start new RM
rm2.start();
@@ -315,7 +334,7 @@ public void testRMRestart() throws Exception {
NMContainerStatus status =
TestRMRestart
.createNMContainerStatus(loadedApp1.getCurrentAppAttempt()
- .getAppAttemptId(), 1, ContainerState.COMPLETE);
+ .getAppAttemptId(), 1, ContainerState.COMPLETE);
nm1.registerNode(Arrays.asList(status), null);
nm2.registerNode();
@@ -412,7 +431,7 @@ public void testRMRestartAppRunningAMFailed() throws Exception {
rmState.getApplicationState();
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm1.getResourceTrackerService());
@@ -438,13 +457,11 @@ public void testRMRestartAppRunningAMFailed() throws Exception {
rm1.waitForState(app0.getApplicationId(), RMAppState.ACCEPTED);
// start new RM
- MockRM rm2 = new MockRM(conf, memStore);
+ MockRM rm2 = createMockRM(conf, memStore);
rm2.start();
// assert the previous AM state is loaded back on RM recovery.
rm2.waitForState(am0.getApplicationAttemptId(), RMAppAttemptState.FAILED);
- rm1.stop();
- rm2.stop();
}
@Test (timeout = 60000)
@@ -468,7 +485,7 @@ public void testRMRestartWaitForPreviousAMToFinish() throws Exception {
rmState.getApplicationState();
// start RM
- final MockRM rm1 = new MockRM(conf, memStore);
+ final MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234" , 16382, rm1.getResourceTrackerService());
@@ -492,8 +509,7 @@ public void testRMRestartWaitForPreviousAMToFinish() throws Exception {
.getAppAttemptState(), RMAppAttemptState.RUNNING);
// start new RM.
- MockRM rm2 = null;
- rm2 = new MockRM(conf, memStore);
+ MockRM rm2 = createMockRM(conf, memStore);
rm2.start();
nm1.setResourceTrackerService(rm2.getResourceTrackerService());
@@ -520,7 +536,7 @@ public void testRMRestartWaitForPreviousAMToFinish() throws Exception {
NMContainerStatus status =
TestRMRestart.createNMContainerStatus(
- am2.getApplicationAttemptId(), 1, ContainerState.COMPLETE);
+ am2.getApplicationAttemptId(), 1, ContainerState.COMPLETE);
nm1.registerNode(Arrays.asList(status), null);
rm2.waitForState(am2.getApplicationAttemptId(), RMAppAttemptState.FAILED);
launchAM(rmApp, rm2, nm1);
@@ -530,8 +546,7 @@ public void testRMRestartWaitForPreviousAMToFinish() throws Exception {
// Now restart RM ...
// Setting AMLivelinessMonitor interval to be 10 Secs.
conf.setInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 10000);
- MockRM rm3 = null;
- rm3 = new MockRM(conf, memStore);
+ MockRM rm3 = createMockRM(conf, memStore);
rm3.start();
// Wait for RM to process all the events as a part of rm recovery.
@@ -578,8 +593,7 @@ public void testRMRestartWaitForPreviousAMToFinish() throws Exception {
memStore.getState().getApplicationState().get(app2.getApplicationId())
.getAttemptCount());
- MockRM rm4 = null;
- rm4 = new MockRM(conf, memStore);
+ MockRM rm4 = createMockRM(conf, memStore);
rm4.start();
rmApp = rm4.getRMContext().getRMApps().get(app1.getApplicationId());
@@ -635,7 +649,7 @@ public void updateApplicationStateInternal(ApplicationId appId,
rmState.getApplicationState();
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
MockNM nm1 = rm1.registerNode("127.0.0.1:1234", 15120);
RMApp app0 = rm1.submitApp(200);
@@ -652,7 +666,7 @@ public void updateApplicationStateInternal(ApplicationId appId,
Assert.assertNull(rmAppState.get(app0.getApplicationId()).getState());
// start RM
- MockRM rm2 = new MockRM(conf, memStore);
+ MockRM rm2 = createMockRM(conf, memStore);
nm1.setResourceTrackerService(rm2.getResourceTrackerService());
rm2.start();
@@ -661,7 +675,7 @@ public void updateApplicationStateInternal(ApplicationId appId,
rm2.waitForState(app0.getApplicationId(), RMAppState.FINISHED);
// app final state is saved via the finish event from attempt.
Assert.assertEquals(RMAppState.FINISHED,
- rmAppState.get(app0.getApplicationId()).getState());
+ rmAppState.get(app0.getApplicationId()).getState());
}
@Test (timeout = 60000)
@@ -674,7 +688,7 @@ public void testRMRestartFailedApp() throws Exception {
rmState.getApplicationState();
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm1.getResourceTrackerService());
@@ -696,7 +710,7 @@ public void testRMRestartFailedApp() throws Exception {
appState.getAttempt(am0.getApplicationAttemptId()).getState());
// start new RM
- MockRM rm2 = new MockRM(conf, memStore);
+ MockRM rm2 = createMockRM(conf, memStore);
rm2.start();
RMApp loadedApp0 = rm2.getRMContext().getRMApps().get(app0.getApplicationId());
rm2.waitForState(app0.getApplicationId(), RMAppState.FAILED);
@@ -709,8 +723,6 @@ public void testRMRestartFailedApp() throws Exception {
.contains("Failing the application."));
// failed diagnostics from attempt is lost because the diagnostics from
// attempt is not yet available by the time app is saving the app state.
- rm1.stop();
- rm2.stop();
}
@Test (timeout = 60000)
@@ -724,7 +736,7 @@ public void testRMRestartKilledApp() throws Exception{
rmState.getApplicationState();
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm1.getResourceTrackerService());
@@ -746,7 +758,7 @@ public void testRMRestartKilledApp() throws Exception{
appState.getAttempt(am0.getApplicationAttemptId()).getState());
// restart rm
- MockRM rm2 = new MockRM(conf, memStore);
+ MockRM rm2 = createMockRM(conf, memStore);
rm2.start();
RMApp loadedApp0 = rm2.getRMContext().getRMApps().get(app0.getApplicationId());
rm2.waitForState(app0.getApplicationId(), RMAppState.KILLED);
@@ -756,9 +768,7 @@ public void testRMRestartKilledApp() throws Exception{
ApplicationReport appReport = verifyAppReportAfterRMRestart(app0, rm2);
Assert.assertEquals(app0.getDiagnostics().toString(),
- appReport.getDiagnostics());
- rm1.stop();
- rm2.stop();
+ appReport.getDiagnostics());
}
@Test (timeout = 60000)
@@ -781,7 +791,7 @@ public synchronized void updateApplicationAttemptStateInternal(
memStore.init(conf);
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
// create app
RMApp app0 =
@@ -793,7 +803,7 @@ public synchronized void updateApplicationAttemptStateInternal(
rm1.waitForState(app0.getApplicationId(), RMAppState.KILLED);
// restart rm
- MockRM rm2 = new MockRM(conf, memStore);
+ MockRM rm2 = createMockRM(conf, memStore);
rm2.start();
RMApp loadedApp0 =
rm2.getRMContext().getRMApps().get(app0.getApplicationId());
@@ -812,7 +822,7 @@ public void testRMRestartSucceededApp() throws Exception {
rmState.getApplicationState();
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm1.getResourceTrackerService());
@@ -839,7 +849,7 @@ public void testRMRestartSucceededApp() throws Exception {
Assert.assertEquals(app0.getFinishTime(), appState.getFinishTime());
// restart rm
- MockRM rm2 = new MockRM(conf, memStore);
+ MockRM rm2 = createMockRM(conf, memStore);
rm2.start();
// verify application report returns the same app info as the app info
@@ -848,9 +858,6 @@ public void testRMRestartSucceededApp() throws Exception {
Assert.assertEquals(FinalApplicationStatus.SUCCEEDED,
appReport.getFinalApplicationStatus());
Assert.assertEquals("trackingUrl", appReport.getOriginalTrackingUrl());
-
- rm1.stop();
- rm2.stop();
}
@Test (timeout = 60000)
@@ -860,7 +867,7 @@ public void testRMRestartGetApplicationList() throws Exception {
memStore.init(conf);
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm1.getResourceTrackerService());
@@ -897,7 +904,7 @@ protected RMAppManager createRMAppManager() {
return spy(super.createRMAppManager());
}
};
-
+ rms.add(rm2);
rm2.start();
GetApplicationsRequest request1 =
@@ -944,9 +951,6 @@ protected RMAppManager createRMAppManager() {
// check application summary is logged for the completed apps after RM restart.
verify(rm2.getRMAppManager(), times(3)).logApplicationSummary(
isA(ApplicationId.class));
-
- rm1.stop();
- rm2.stop();
}
private MockAM launchAM(RMApp app, MockRM rm, MockNM nm)
@@ -1012,7 +1016,7 @@ public void testRMRestartOnMaxAppAttempts() throws Exception {
Map<ApplicationId, ApplicationState> rmAppState =
rmState.getApplicationState();
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm1.getResourceTrackerService());
@@ -1050,7 +1054,7 @@ public void testRMRestartOnMaxAppAttempts() throws Exception {
// Setting AMLivelinessMonitor interval to be 3 Secs.
conf.setInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 3000);
// start new RM
- MockRM rm2 = new MockRM(conf, memStore);
+ MockRM rm2 = createMockRM(conf, memStore);
rm2.start();
// verify that maxAppAttempts is set to global value
@@ -1069,10 +1073,6 @@ public void testRMRestartOnMaxAppAttempts() throws Exception {
Assert.assertEquals(RMAppState.FAILED,
rmAppState.get(app1.getApplicationId()).getState());
Assert.assertNull(rmAppState.get(app2.getApplicationId()).getState());
-
- // stop the RM
- rm1.stop();
- rm2.stop();
}
@Test (timeout = 60000)
@@ -1154,10 +1154,6 @@ public void testDelegationTokenRestoredInDelegationTokenRenewer()
// verify tokens are properly populated back to rm2 DelegationTokenRenewer
Assert.assertEquals(tokenSet, rm2.getRMContext()
.getDelegationTokenRenewer().getDelegationTokens());
-
- // stop the RM
- rm1.stop();
- rm2.stop();
}
private void waitForTokensToBeRenewed(MockRM rm2) throws Exception {
@@ -1253,8 +1249,6 @@ public void testAppAttemptTokensRestoredOnRMRestart() throws Exception {
Assert.assertArrayEquals(amrmToken.getPassword(),
rm2.getRMContext().getAMRMTokenSecretManager().retrievePassword(
amrmToken.decodeIdentifier()));
- rm1.stop();
- rm2.stop();
}
@Test (timeout = 60000)
@@ -1402,10 +1396,6 @@ public void testRMDelegationTokenRestoredOnRMRestart() throws Exception {
.getAllTokens();
Assert.assertFalse(allTokensRM2.containsKey(dtId1));
Assert.assertFalse(rmDTState.containsKey(dtId1));
-
- // stop the RM
- rm1.stop();
- rm2.stop();
}
// This is to test submit an application to the new RM with the old delegation
@@ -1466,7 +1456,7 @@ protected void handleStoreEvent(RMStateStoreEvent event) {
memStore.init(conf);
// start RM
- final MockRM rm1 = new MockRM(conf, memStore);
+ final MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
// create apps.
@@ -1512,7 +1502,7 @@ public void testFinishedAppRemovalAfterRMRestart() throws Exception {
RMState rmState = memStore.getState();
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm1.getResourceTrackerService());
@@ -1523,7 +1513,7 @@ public void testFinishedAppRemovalAfterRMRestart() throws Exception {
MockAM am0 = launchAM(app0, rm1, nm1);
finishApplicationMaster(app0, rm1, nm1, am0);
- MockRM rm2 = new MockRM(conf, memStore);
+ MockRM rm2 = createMockRM(conf, memStore);
rm2.start();
nm1.setResourceTrackerService(rm2.getResourceTrackerService());
nm1 = rm2.registerNode("127.0.0.1:1234", 15120);
@@ -1545,9 +1535,6 @@ public void testFinishedAppRemovalAfterRMRestart() throws Exception {
Assert.assertNull(rm2.getRMContext().getRMApps()
.get(app0.getApplicationId()));
Assert.assertNull(rmAppState.get(app0.getApplicationId()));
-
- rm1.stop();
- rm2.stop();
}
// This is to test RM does not get hang on shutdown.
@@ -1564,7 +1551,7 @@ public synchronized void checkVersion()
memStore.init(conf);
MockRM rm1 = null;
try {
- rm1 = new MockRM(conf, memStore);
+ rm1 = createMockRM(conf, memStore);
rm1.start();
Assert.fail();
} catch (Exception e) {
@@ -1582,7 +1569,7 @@ public void testClientRetryOnKillingApplication() throws Exception {
memStore.init(conf);
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm1.getResourceTrackerService());
@@ -1698,7 +1685,11 @@ protected void recoverApplication(ApplicationState appState,
}
}
};
- rm1.start();
+ try {
+ rm1.start();
+ } finally {
+ rm1.stop();
+ }
}
@SuppressWarnings("resource")
@@ -1711,7 +1702,7 @@ public void testQueueMetricsOnRMRestart() throws Exception {
// PHASE 1: create state in an RM
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm1.getResourceTrackerService());
@@ -1749,7 +1740,7 @@ public void testQueueMetricsOnRMRestart() throws Exception {
// PHASE 2: create new RM and start from old state
// create new RM to represent restart and recover state
- MockRM rm2 = new MockRM(conf, memStore);
+ MockRM rm2 = createMockRM(conf, memStore);
QueueMetrics qm2 = rm2.getResourceScheduler().getRootQueueMetrics();
resetQueueMetrics(qm2);
assertQueueMetrics(qm2, 0, 0, 0, 0);
@@ -1766,7 +1757,7 @@ public void testQueueMetricsOnRMRestart() throws Exception {
NMContainerStatus status =
TestRMRestart
.createNMContainerStatus(loadedApp1.getCurrentAppAttempt()
- .getAppAttemptId(), 1, ContainerState.COMPLETE);
+ .getAppAttemptId(), 1, ContainerState.COMPLETE);
nm1.registerNode(Arrays.asList(status), null);
while (loadedApp1.getAppAttempts().size() != 2) {
@@ -1795,10 +1786,6 @@ public void testQueueMetricsOnRMRestart() throws Exception {
// finish the AMs
finishApplicationMaster(loadedApp1, rm2, nm1, am1);
assertQueueMetrics(qm2, 1, 0, 0, 1);
-
- // stop RM's
- rm2.stop();
- rm1.stop();
}
@@ -1836,43 +1823,58 @@ public void testDecomissionedNMsMetricsOnRMRestart() throws Exception {
hostFile.getAbsolutePath());
writeToHostsFile("");
final DrainDispatcher dispatcher = new DrainDispatcher();
- MockRM rm1 = new MockRM(conf) {
- @Override
- protected Dispatcher createDispatcher() {
- return dispatcher;
+ MockRM rm1 = null, rm2 = null;
+ try {
+ rm1 = new MockRM(conf) {
+ @Override
+ protected Dispatcher createDispatcher() {
+ return dispatcher;
+ }
+ };
+ rm1.start();
+ MockNM nm1 = rm1.registerNode("localhost:1234", 8000);
+ MockNM nm2 = rm1.registerNode("host2:1234", 8000);
+ Assert
+ .assertEquals(0,
+ ClusterMetrics.getMetrics().getNumDecommisionedNMs());
+ String ip = NetUtils.normalizeHostName("localhost");
+ // Add 2 hosts to exclude list.
+ writeToHostsFile("host2", ip);
+
+ // refresh nodes
+ rm1.getNodesListManager().refreshNodes(conf);
+ NodeHeartbeatResponse nodeHeartbeat = nm1.nodeHeartbeat(true);
+ Assert
+ .assertTrue(
+ NodeAction.SHUTDOWN.equals(nodeHeartbeat.getNodeAction()));
+ nodeHeartbeat = nm2.nodeHeartbeat(true);
+ Assert.assertTrue("The decommisioned metrics are not updated",
+ NodeAction.SHUTDOWN.equals(nodeHeartbeat.getNodeAction()));
+
+ dispatcher.await();
+ Assert
+ .assertEquals(2,
+ ClusterMetrics.getMetrics().getNumDecommisionedNMs());
+ rm1.stop();
+ rm1 = null;
+ Assert
+ .assertEquals(0,
+ ClusterMetrics.getMetrics().getNumDecommisionedNMs());
+
+ // restart RM.
+ rm2 = new MockRM(conf);
+ rm2.start();
+ Assert
+ .assertEquals(2,
+ ClusterMetrics.getMetrics().getNumDecommisionedNMs());
+ } finally {
+ if (rm1 != null) {
+ rm1.stop();
}
- };
- rm1.start();
- MockNM nm1 = rm1.registerNode("localhost:1234", 8000);
- MockNM nm2 = rm1.registerNode("host2:1234", 8000);
- Assert
- .assertEquals(0, ClusterMetrics.getMetrics().getNumDecommisionedNMs());
- String ip = NetUtils.normalizeHostName("localhost");
- // Add 2 hosts to exclude list.
- writeToHostsFile("host2", ip);
-
- // refresh nodes
- rm1.getNodesListManager().refreshNodes(conf);
- NodeHeartbeatResponse nodeHeartbeat = nm1.nodeHeartbeat(true);
- Assert
- .assertTrue(NodeAction.SHUTDOWN.equals(nodeHeartbeat.getNodeAction()));
- nodeHeartbeat = nm2.nodeHeartbeat(true);
- Assert.assertTrue("The decommisioned metrics are not updated",
- NodeAction.SHUTDOWN.equals(nodeHeartbeat.getNodeAction()));
-
- dispatcher.await();
- Assert
- .assertEquals(2, ClusterMetrics.getMetrics().getNumDecommisionedNMs());
- rm1.stop();
- Assert
- .assertEquals(0, ClusterMetrics.getMetrics().getNumDecommisionedNMs());
-
- // restart RM.
- MockRM rm2 = new MockRM(conf);
- rm2.start();
- Assert
- .assertEquals(2, ClusterMetrics.getMetrics().getNumDecommisionedNMs());
- rm2.stop();
+ if (rm2 != null) {
+ rm2.stop();
+ }
+ }
}
// Test Delegation token is renewed synchronously so that recover events
@@ -1887,7 +1889,7 @@ public void testSynchronouslyRenewDTOnRecovery() throws Exception {
memStore.init(conf);
// start RM
- MockRM rm1 = new MockRM(conf, memStore);
+ MockRM rm1 = createMockRM(conf, memStore);
rm1.start();
final MockNM nm1 =
new MockNM("127.0.0.1:1234", 15120, rm1.getResourceTrackerService());
@@ -1910,24 +1912,29 @@ protected void serviceStart() throws Exception {
nm1.setResourceTrackerService(getResourceTrackerService());
NMContainerStatus status =
TestRMRestart.createNMContainerStatus(
- am0.getApplicationAttemptId(), 1, ContainerState.COMPLETE);
+ am0.getApplicationAttemptId(), 1, ContainerState.COMPLETE);
nm1.registerNode(Arrays.asList(status), null);
}
};
}
};
- // Re-start RM
- rm2.start();
- // wait for the 2nd attempt to be started.
- RMApp loadedApp0 =
- rm2.getRMContext().getRMApps().get(app0.getApplicationId());
- int timeoutSecs = 0;
- while (loadedApp0.getAppAttempts().size() != 2 && timeoutSecs++ < 40) {
- Thread.sleep(200);
+ try {
+ // Re-start RM
+ rm2.start();
+
+ // wait for the 2nd attempt to be started.
+ RMApp loadedApp0 =
+ rm2.getRMContext().getRMApps().get(app0.getApplicationId());
+ int timeoutSecs = 0;
+ while (loadedApp0.getAppAttempts().size() != 2 && timeoutSecs++ < 40) {
+ Thread.sleep(200);
+ }
+ MockAM am1 = MockRM.launchAndRegisterAM(loadedApp0, rm2, nm1);
+ MockRM.finishAMAndVerifyAppState(loadedApp0, rm2, nm1, am1);
+ } finally {
+ rm2.stop();
}
- MockAM am1 = MockRM.launchAndRegisterAM(loadedApp0, rm2, nm1);
- MockRM.finishAMAndVerifyAppState(loadedApp0, rm2, nm1, am1);
}
private void writeToHostsFile(String... hosts) throws IOException {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
index 0dcd228453e96..8b113a0021336 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
@@ -18,7 +18,11 @@
package org.apache.hadoop.yarn.server.resourcemanager.security;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.resourcemanager
+ .ParameterizedSchedulerTestBase;
import static org.junit.Assert.fail;
+import org.junit.Before;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -74,7 +78,17 @@
import org.apache.hadoop.yarn.util.Records;
import org.junit.Test;
-public class TestClientToAMTokens {
+public class TestClientToAMTokens extends ParameterizedSchedulerTestBase {
+ private YarnConfiguration conf;
+
+ public TestClientToAMTokens(SchedulerType type) {
+ super(type);
+ }
+
+ @Before
+ public void setup() {
+ conf = getConf();
+ }
private interface CustomProtocol {
@SuppressWarnings("unused")
@@ -151,8 +165,6 @@ protected void serviceStart() throws Exception {
@Test
public void testClientToAMTokens() throws Exception {
-
- final Configuration conf = new Configuration();
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
"kerberos");
UserGroupInformation.setConfiguration(conf);
@@ -267,6 +279,8 @@ public RegisterApplicationMasterResponse run() {
// Now for an authenticated user
verifyValidToken(conf, am, token);
+
+ rm.stop();
}
private void verifyTokenWithTamperedID(final Configuration conf,
|
3fd458ad88808e542b211461a49728138c1ebe79
|
hbase
|
HBASE-6427 Pluggable compaction and scan policies- via coprocessors--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1367361 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
index feb9aa391877..3607e7dbe178 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
@@ -17,7 +17,7 @@
package org.apache.hadoop.hbase.coprocessor;
import java.util.List;
-import java.util.Map;
+import java.util.NavigableSet;
import com.google.common.collect.ImmutableList;
@@ -37,7 +37,9 @@
import org.apache.hadoop.hbase.filter.WritableByteArrayComparable;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
@@ -74,6 +76,13 @@ public void preClose(ObserverContext<RegionCoprocessorEnvironment> e,
public void postClose(ObserverContext<RegionCoprocessorEnvironment> e,
boolean abortRequested) { }
+ @Override
+ public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ final Store store, final KeyValueScanner memstoreScanner, final InternalScanner s)
+ throws IOException {
+ return null;
+ }
+
@Override
public void preFlush(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
}
@@ -82,6 +91,17 @@ public void preFlush(ObserverContext<RegionCoprocessorEnvironment> e) throws IOE
public void postFlush(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
}
+ @Override
+ public InternalScanner preFlush(ObserverContext<RegionCoprocessorEnvironment> e, Store store,
+ InternalScanner scanner) throws IOException {
+ return scanner;
+ }
+
+ @Override
+ public void postFlush(ObserverContext<RegionCoprocessorEnvironment> e, Store store,
+ StoreFile resultFile) throws IOException {
+ }
+
@Override
public void preSplit(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
}
@@ -105,6 +125,13 @@ public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment>
return scanner;
}
+ @Override
+ public InternalScanner preCompactScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ final Store store, List<? extends KeyValueScanner> scanners, final ScanType scanType,
+ final long earliestPutTs, final InternalScanner s) throws IOException {
+ return null;
+ }
+
@Override
public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e, final Store store,
final StoreFile resultFile) throws IOException {
@@ -241,6 +268,13 @@ public RegionScanner preScannerOpen(final ObserverContext<RegionCoprocessorEnvir
return s;
}
+ @Override
+ public KeyValueScanner preStoreScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ final Store store, final Scan scan, final NavigableSet<byte[]> targetCols,
+ final KeyValueScanner s) throws IOException {
+ return null;
+ }
+
@Override
public RegionScanner postScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> e,
final Scan scan, final RegionScanner s) throws IOException {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
index c5b858eaca86..c3cfa097bbb7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
@@ -18,6 +18,7 @@
import java.io.IOException;
import java.util.List;
+import java.util.NavigableSet;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -35,9 +36,12 @@
import org.apache.hadoop.hbase.filter.WritableByteArrayComparable;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
@@ -64,20 +68,63 @@ public interface RegionObserver extends Coprocessor {
*/
void postOpen(final ObserverContext<RegionCoprocessorEnvironment> c);
+ /**
+ * Called before a memstore is flushed to disk and prior to creating the scanner to read from
+ * the memstore. To override or modify how a memstore is flushed,
+ * implementing classes can return a new scanner to provide the KeyValues to be
+ * stored into the new {@code StoreFile} or null to perform the default processing.
+ * Calling {@link org.apache.hadoop.hbase.coprocessor.ObserverContext#bypass()} has no
+ * effect in this hook.
+ * @param c the environment provided by the region server
+ * @param store the store being flushed
+ * @param memstoreScanner the scanner for the memstore that is flushed
+ * @param s the base scanner, if not {@code null}, from previous RegionObserver in the chain
+ * @return the scanner to use during the flush. {@code null} if the default implementation
+ * is to be used.
+ * @throws IOException if an error occurred on the coprocessor
+ */
+ InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ final Store store, final KeyValueScanner memstoreScanner, final InternalScanner s)
+ throws IOException;
+
/**
* Called before the memstore is flushed to disk.
* @param c the environment provided by the region server
* @throws IOException if an error occurred on the coprocessor
+ * @deprecated use {@link #preFlush(ObserverContext, Store, InternalScanner)} instead
*/
void preFlush(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;
+ /**
+ * Called before a Store's memstore is flushed to disk.
+ * @param c the environment provided by the region server
+ * @param store the store where compaction is being requested
+ * @param scanner the scanner over existing data used in the store file
+ * @return the scanner to use during compaction. Should not be {@code null}
+ * unless the implementation is writing new store files on its own.
+ * @throws IOException if an error occurred on the coprocessor
+ */
+ InternalScanner preFlush(final ObserverContext<RegionCoprocessorEnvironment> c, final Store store,
+ final InternalScanner scanner) throws IOException;
+
/**
* Called after the memstore is flushed to disk.
* @param c the environment provided by the region server
* @throws IOException if an error occurred on the coprocessor
+ * @deprecated use {@link #preFlush(ObserverContext, Store, InternalScanner)} instead.
*/
void postFlush(final ObserverContext<RegionCoprocessorEnvironment> c) throws IOException;
+ /**
+ * Called after a Store's memstore is flushed to disk.
+ * @param c the environment provided by the region server
+ * @param store the store being flushed
+ * @param resultFile the new store file written out during compaction
+ * @throws IOException if an error occurred on the coprocessor
+ */
+ void postFlush(final ObserverContext<RegionCoprocessorEnvironment> c, final Store store,
+ final StoreFile resultFile) throws IOException;
+
/**
* Called prior to selecting the {@link StoreFile}s to compact from the list
* of available candidates. To alter the files used for compaction, you may
@@ -127,6 +174,29 @@ void postCompactSelection(final ObserverContext<RegionCoprocessorEnvironment> c,
InternalScanner preCompact(final ObserverContext<RegionCoprocessorEnvironment> c,
final Store store, final InternalScanner scanner) throws IOException;
+ /**
+ * Called prior to writing the {@link StoreFile}s selected for compaction into
+ * a new {@code StoreFile} and prior to creating the scanner used to read the
+ * input files. To override or modify the compaction process,
+ * implementing classes can return a new scanner to provide the KeyValues to be
+ * stored into the new {@code StoreFile} or null to perform the default processing.
+ * Calling {@link org.apache.hadoop.hbase.coprocessor.ObserverContext#bypass()} has no
+ * effect in this hook.
+ * @param c the environment provided by the region server
+ * @param store the store being compacted
+ * @param scanners the list {@link StoreFileScanner}s to be read from
+ * @param scantype the {@link ScanType} indicating whether this is a major or minor compaction
+ * @param earliestPutTs timestamp of the earliest put that was found in any of the involved
+ * store files
+ * @param s the base scanner, if not {@code null}, from previous RegionObserver in the chain
+ * @return the scanner to use during compaction. {@code null} if the default implementation
+ * is to be used.
+ * @throws IOException if an error occurred on the coprocessor
+ */
+ InternalScanner preCompactScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ final Store store, List<? extends KeyValueScanner> scanners, final ScanType scanType,
+ final long earliestPutTs, final InternalScanner s) throws IOException;
+
/**
* Called after compaction has completed and the new store file has been
* moved in to place.
@@ -549,6 +619,30 @@ RegionScanner preScannerOpen(final ObserverContext<RegionCoprocessorEnvironment>
final Scan scan, final RegionScanner s)
throws IOException;
+ /**
+ * Called before a store opens a new scanner.
+ * This hook is called when a "user" scanner is opened.
+ * <p>
+ * See {@link #preFlushScannerOpen(ObserverContext, Store, KeyValueScanner, InternalScanner)}
+ * and {@link #preCompactScannerOpen(ObserverContext, Store, List, ScanType, long, InternalScanner)}
+ * to override scanners created for flushes or compactions, resp.
+ * <p>
+ * Call CoprocessorEnvironment#complete to skip any subsequent chained
+ * coprocessors.
+ * Calling {@link org.apache.hadoop.hbase.coprocessor.ObserverContext#bypass()} has no
+ * effect in this hook.
+ * @param c the environment provided by the region server
+ * @param store the store being scanned
+ * @param scan the Scan specification
+ * @param targetCols columns to be used in the scanner
+ * @param s the base scanner, if not {@code null}, from previous RegionObserver in the chain
+ * @return a KeyValueScanner instance to use or {@code null} to use the default implementation
+ * @throws IOException if an error occurred on the coprocessor
+ */
+ KeyValueScanner preStoreScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ final Store store, final Scan scan, final NavigableSet<byte[]> targetCols,
+ final KeyValueScanner s) throws IOException;
+
/**
* Called after the client opens a new scanner.
* <p>
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
index 9ed051f8be3e..b606458e6e1d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
@@ -32,7 +32,6 @@
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.Compression;
-import org.apache.hadoop.hbase.regionserver.StoreScanner.ScanType;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.StringUtils;
@@ -127,12 +126,21 @@ StoreFile.Writer compact(final Store store,
try {
InternalScanner scanner = null;
try {
- Scan scan = new Scan();
- scan.setMaxVersions(store.getFamily().getMaxVersions());
- /* Include deletes, unless we are doing a major compaction */
- scanner = new StoreScanner(store, scan, scanners,
- majorCompaction? ScanType.MAJOR_COMPACT : ScanType.MINOR_COMPACT,
- smallestReadPoint, earliestPutTs);
+ if (store.getHRegion().getCoprocessorHost() != null) {
+ scanner = store
+ .getHRegion()
+ .getCoprocessorHost()
+ .preCompactScannerOpen(store, scanners,
+ majorCompaction ? ScanType.MAJOR_COMPACT : ScanType.MINOR_COMPACT, earliestPutTs);
+ }
+ if (scanner == null) {
+ Scan scan = new Scan();
+ scan.setMaxVersions(store.getFamily().getMaxVersions());
+ /* Include deletes, unless we are doing a major compaction */
+ scanner = new StoreScanner(store, store.scanInfo, scan, scanners,
+ majorCompaction? ScanType.MAJOR_COMPACT : ScanType.MINOR_COMPACT,
+ smallestReadPoint, earliestPutTs);
+ }
if (store.getHRegion().getCoprocessorHost() != null) {
InternalScanner cpScanner =
store.getHRegion().getCoprocessorHost().preCompact(store, scanner);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 7df5e72d26d3..36d6bacd070f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -1216,7 +1216,7 @@ void triggerMajorCompaction() {
* @param majorCompaction True to force a major compaction regardless of thresholds
* @throws IOException e
*/
- void compactStores(final boolean majorCompaction)
+ public void compactStores(final boolean majorCompaction)
throws IOException {
if (majorCompaction) {
this.triggerMajorCompaction();
@@ -3469,7 +3469,7 @@ public HRegionInfo getRegionInfo() {
for (Map.Entry<byte[], NavigableSet<byte[]>> entry :
scan.getFamilyMap().entrySet()) {
Store store = stores.get(entry.getKey());
- StoreScanner scanner = store.getScanner(scan, entry.getValue());
+ KeyValueScanner scanner = store.getScanner(scan, entry.getValue());
scanners.add(scanner);
}
this.storeHeap = new KeyValueHeap(scanners, comparator);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 58afaf439b9f..f6efea5b1b23 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -303,6 +303,31 @@ public void postClose(boolean abortRequested) {
}
}
+ /**
+ * See
+ * {@link RegionObserver#preCompactScannerOpen(ObserverContext, Store, List, ScanType, long, InternalScanner)}
+ */
+ public InternalScanner preCompactScannerOpen(Store store, List<StoreFileScanner> scanners,
+ ScanType scanType, long earliestPutTs) throws IOException {
+ ObserverContext<RegionCoprocessorEnvironment> ctx = null;
+ InternalScanner s = null;
+ for (RegionEnvironment env: coprocessors) {
+ if (env.getInstance() instanceof RegionObserver) {
+ ctx = ObserverContext.createAndPrepare(env, ctx);
+ try {
+ s = ((RegionObserver) env.getInstance()).preCompactScannerOpen(ctx, store, scanners,
+ scanType, earliestPutTs, s);
+ } catch (Throwable e) {
+ handleCoprocessorThrowable(env,e);
+ }
+ if (ctx.shouldComplete()) {
+ break;
+ }
+ }
+ }
+ return s;
+ }
+
/**
* Called prior to selecting the {@link StoreFile}s for compaction from
* the list of currently available candidates.
@@ -389,7 +414,7 @@ public InternalScanner preCompact(Store store, InternalScanner scanner) throws I
* Called after the store compaction has completed.
* @param store the store being compacted
* @param resultFile the new store file written during compaction
- * @throws IOException
+ * @throws IOException
*/
public void postCompact(Store store, StoreFile resultFile) throws IOException {
ObserverContext<RegionCoprocessorEnvironment> ctx = null;
@@ -408,6 +433,31 @@ public void postCompact(Store store, StoreFile resultFile) throws IOException {
}
}
+ /**
+ * Invoked before a memstore flush
+ * @throws IOException
+ */
+ public InternalScanner preFlush(Store store, InternalScanner scanner) throws IOException {
+ ObserverContext<RegionCoprocessorEnvironment> ctx = null;
+ boolean bypass = false;
+ for (RegionEnvironment env: coprocessors) {
+ if (env.getInstance() instanceof RegionObserver) {
+ ctx = ObserverContext.createAndPrepare(env, ctx);
+ try {
+ scanner = ((RegionObserver)env.getInstance()).preFlush(
+ ctx, store, scanner);
+ } catch (Throwable e) {
+ handleCoprocessorThrowable(env,e);
+ }
+ bypass |= ctx.shouldBypass();
+ if (ctx.shouldComplete()) {
+ break;
+ }
+ }
+ }
+ return bypass ? null : scanner;
+ }
+
/**
* Invoked before a memstore flush
* @throws IOException
@@ -429,9 +479,32 @@ public void preFlush() throws IOException {
}
}
+ /**
+ * See
+ * {@link RegionObserver#preFlush(ObserverContext, Store, KeyValueScanner)}
+ */
+ public InternalScanner preFlushScannerOpen(Store store, KeyValueScanner memstoreScanner) throws IOException {
+ ObserverContext<RegionCoprocessorEnvironment> ctx = null;
+ InternalScanner s = null;
+ for (RegionEnvironment env : coprocessors) {
+ if (env.getInstance() instanceof RegionObserver) {
+ ctx = ObserverContext.createAndPrepare(env, ctx);
+ try {
+ s = ((RegionObserver) env.getInstance()).preFlushScannerOpen(ctx, store, memstoreScanner, s);
+ } catch (Throwable e) {
+ handleCoprocessorThrowable(env, e);
+ }
+ if (ctx.shouldComplete()) {
+ break;
+ }
+ }
+ }
+ return s;
+ }
+
/**
* Invoked after a memstore flush
- * @throws IOException
+ * @throws IOException
*/
public void postFlush() throws IOException {
ObserverContext<RegionCoprocessorEnvironment> ctx = null;
@@ -450,9 +523,30 @@ public void postFlush() throws IOException {
}
}
+ /**
+ * Invoked after a memstore flush
+ * @throws IOException
+ */
+ public void postFlush(final Store store, final StoreFile storeFile) throws IOException {
+ ObserverContext<RegionCoprocessorEnvironment> ctx = null;
+ for (RegionEnvironment env: coprocessors) {
+ if (env.getInstance() instanceof RegionObserver) {
+ ctx = ObserverContext.createAndPrepare(env, ctx);
+ try {
+ ((RegionObserver)env.getInstance()).postFlush(ctx, store, storeFile);
+ } catch (Throwable e) {
+ handleCoprocessorThrowable(env, e);
+ }
+ if (ctx.shouldComplete()) {
+ break;
+ }
+ }
+ }
+ }
+
/**
* Invoked just before a split
- * @throws IOException
+ * @throws IOException
*/
public void preSplit() throws IOException {
ObserverContext<RegionCoprocessorEnvironment> ctx = null;
@@ -1088,6 +1182,31 @@ public RegionScanner preScannerOpen(Scan scan) throws IOException {
return bypass ? s : null;
}
+ /**
+ * See
+ * {@link RegionObserver#preStoreScannerOpen(ObserverContext, Store, Scan, NavigableSet, KeyValueScanner)}
+ */
+ public KeyValueScanner preStoreScannerOpen(Store store, Scan scan,
+ final NavigableSet<byte[]> targetCols) throws IOException {
+ KeyValueScanner s = null;
+ ObserverContext<RegionCoprocessorEnvironment> ctx = null;
+ for (RegionEnvironment env: coprocessors) {
+ if (env.getInstance() instanceof RegionObserver) {
+ ctx = ObserverContext.createAndPrepare(env, ctx);
+ try {
+ s = ((RegionObserver) env.getInstance()).preStoreScannerOpen(ctx, store, scan,
+ targetCols, s);
+ } catch (Throwable e) {
+ handleCoprocessorThrowable(env, e);
+ }
+ if (ctx.shouldComplete()) {
+ break;
+ }
+ }
+ }
+ return s;
+ }
+
/**
* @param scan the Scan specification
* @param s the scanner
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
index 20e297864851..f02afd7171b1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
@@ -34,8 +34,6 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.regionserver.StoreScanner.ScanType;
-
/**
* A query matcher that is specifically designed for the scan case.
*/
@@ -138,7 +136,7 @@ public class ScanQueryMatcher {
* based on TTL
*/
public ScanQueryMatcher(Scan scan, Store.ScanInfo scanInfo,
- NavigableSet<byte[]> columns, StoreScanner.ScanType scanType,
+ NavigableSet<byte[]> columns, ScanType scanType,
long readPointToUse, long earliestPutTs, long oldestUnexpiredTS) {
this.tr = scan.getTimeRange();
this.rowComparator = scanInfo.getComparator().getRawComparator();
@@ -185,7 +183,7 @@ public ScanQueryMatcher(Scan scan, Store.ScanInfo scanInfo,
*/
ScanQueryMatcher(Scan scan, Store.ScanInfo scanInfo,
NavigableSet<byte[]> columns, long oldestUnexpiredTS) {
- this(scan, scanInfo, columns, StoreScanner.ScanType.USER_SCAN,
+ this(scan, scanInfo, columns, ScanType.USER_SCAN,
Long.MAX_VALUE, /* max Readpoint to track versions */
HConstants.LATEST_TIMESTAMP, oldestUnexpiredTS);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanType.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanType.java
new file mode 100644
index 000000000000..7b075120cbee
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanType.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * Enum to distinguish general scan types.
+ */
[email protected]
+public enum ScanType {
+ MAJOR_COMPACT,
+ MINOR_COMPACT,
+ USER_SCAN
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
index 3f5d76c06037..87a1c13f88d9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
@@ -63,7 +63,6 @@
import org.apache.hadoop.hbase.io.hfile.InvalidHFileException;
import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
-import org.apache.hadoop.hbase.regionserver.StoreScanner.ScanType;
import org.apache.hadoop.hbase.regionserver.compactions.CompactSelection;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
@@ -212,9 +211,7 @@ protected Store(Path basedir, HRegion region, HColumnDescriptor family,
"ms in store " + this);
// Why not just pass a HColumnDescriptor in here altogether? Even if have
// to clone it?
- scanInfo = new ScanInfo(family.getName(), family.getMinVersions(),
- family.getMaxVersions(), ttl, family.getKeepDeletedCells(),
- timeToPurgeDeletes, this.comparator);
+ scanInfo = new ScanInfo(family, ttl, timeToPurgeDeletes, this.comparator);
this.memstore = new MemStore(conf, this.comparator);
// By default, compact if storefile.count >= minFilesToCompact
@@ -728,15 +725,30 @@ private Path internalFlushCache(final SortedSet<KeyValue> set,
if (set.size() == 0) {
return null;
}
- Scan scan = new Scan();
- scan.setMaxVersions(scanInfo.getMaxVersions());
// Use a store scanner to find which rows to flush.
// Note that we need to retain deletes, hence
// treat this as a minor compaction.
- InternalScanner scanner = new StoreScanner(this, scan, Collections
- .singletonList(new CollectionBackedScanner(set, this.comparator)),
- ScanType.MINOR_COMPACT, this.region.getSmallestReadPoint(),
- HConstants.OLDEST_TIMESTAMP);
+ InternalScanner scanner = null;
+ KeyValueScanner memstoreScanner = new CollectionBackedScanner(set, this.comparator);
+ if (getHRegion().getCoprocessorHost() != null) {
+ scanner = getHRegion().getCoprocessorHost().preFlushScannerOpen(this, memstoreScanner);
+ }
+ if (scanner == null) {
+ Scan scan = new Scan();
+ scan.setMaxVersions(scanInfo.getMaxVersions());
+ scanner = new StoreScanner(this, scanInfo, scan, Collections.singletonList(new CollectionBackedScanner(
+ set, this.comparator)), ScanType.MINOR_COMPACT, this.region.getSmallestReadPoint(),
+ HConstants.OLDEST_TIMESTAMP);
+ }
+ if (getHRegion().getCoprocessorHost() != null) {
+ InternalScanner cpScanner =
+ getHRegion().getCoprocessorHost().preFlush(this, scanner);
+ // NULL scanner returned from coprocessor hooks means skip normal processing
+ if (cpScanner == null) {
+ return null;
+ }
+ scanner = cpScanner;
+ }
try {
// TODO: We can fail in the below block before we complete adding this
// flush to list of store files. Add cleanup of anything put on filesystem
@@ -1941,11 +1953,18 @@ boolean getForceMajorCompaction() {
* are not in a compaction.
* @throws IOException
*/
- public StoreScanner getScanner(Scan scan,
+ public KeyValueScanner getScanner(Scan scan,
final NavigableSet<byte []> targetCols) throws IOException {
lock.readLock().lock();
try {
- return new StoreScanner(this, scan, targetCols);
+ KeyValueScanner scanner = null;
+ if (getHRegion().getCoprocessorHost() != null) {
+ scanner = getHRegion().getCoprocessorHost().preStoreScannerOpen(this, scan, targetCols);
+ }
+ if (scanner == null) {
+ scanner = new StoreScanner(this, getScanInfo(), scan, targetCols);
+ }
+ return scanner;
} finally {
lock.readLock().unlock();
}
@@ -2065,7 +2084,7 @@ boolean throttleCompaction(long compactionSize) {
return compactionSize > throttlePoint;
}
- HRegion getHRegion() {
+ public HRegion getHRegion() {
return this.region;
}
@@ -2168,6 +2187,12 @@ public boolean commit(MonitoredTask status) throws IOException {
}
storeFile = Store.this.commitFile(storeFilePath, cacheFlushId,
snapshotTimeRangeTracker, flushedSize, status);
+ if (Store.this.getHRegion().getCoprocessorHost() != null) {
+ Store.this.getHRegion()
+ .getCoprocessorHost()
+ .postFlush(Store.this, storeFile);
+ }
+
// Add new file to store files. Clear snapshot too while we have
// the Store write lock.
return Store.this.updateStorefiles(storeFile, snapshot);
@@ -2210,6 +2235,10 @@ public KeyValue.KVComparator getComparator() {
return comparator;
}
+ public ScanInfo getScanInfo() {
+ return scanInfo;
+ }
+
/**
* Immutable information for scans over a store.
*/
@@ -2226,6 +2255,17 @@ public static class ScanInfo {
+ (2 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_INT)
+ Bytes.SIZEOF_LONG + Bytes.SIZEOF_BOOLEAN);
+ /**
+ * @param family {@link HColumnDescriptor} describing the column family
+ * @param ttl Store's TTL (in ms)
+ * @param timeToPurgeDeletes duration in ms after which a delete marker can
+ * be purged during a major compaction.
+ * @param comparator The store's comparator
+ */
+ public ScanInfo(HColumnDescriptor family, long ttl, long timeToPurgeDeletes, KVComparator comparator) {
+ this(family.getName(), family.getMinVersions(), family.getMaxVersions(), ttl, family
+ .getKeepDeletedCells(), timeToPurgeDeletes, comparator);
+ }
/**
* @param family Name of this store's column family
* @param minVersions Store's MIN_VERSIONS setting
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index a46cb72ab5e4..cad774130e92 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -33,6 +33,7 @@
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.regionserver.Store.ScanInfo;
import org.apache.hadoop.hbase.regionserver.metrics.RegionMetricsStorage;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
import org.apache.hadoop.hbase.util.Bytes;
@@ -43,7 +44,7 @@
* into List<KeyValue> for a single row.
*/
@InterfaceAudience.Private
-class StoreScanner extends NonLazyKeyValueScanner
+public class StoreScanner extends NonLazyKeyValueScanner
implements KeyValueScanner, InternalScanner, ChangedReadersObserver {
static final Log LOG = LogFactory.getLog(StoreScanner.class);
private Store store;
@@ -106,16 +107,16 @@ private StoreScanner(Store store, boolean cacheBlocks, Scan scan,
* @param columns which columns we are scanning
* @throws IOException
*/
- StoreScanner(Store store, Scan scan, final NavigableSet<byte[]> columns)
+ public StoreScanner(Store store, ScanInfo scanInfo, Scan scan, final NavigableSet<byte[]> columns)
throws IOException {
- this(store, scan.getCacheBlocks(), scan, columns, store.scanInfo.getTtl(),
- store.scanInfo.getMinVersions());
+ this(store, scan.getCacheBlocks(), scan, columns, scanInfo.getTtl(),
+ scanInfo.getMinVersions());
initializeMetricNames();
if (columns != null && scan.isRaw()) {
throw new DoNotRetryIOException(
"Cannot specify any column for a raw scan");
}
- matcher = new ScanQueryMatcher(scan, store.scanInfo, columns,
+ matcher = new ScanQueryMatcher(scan, scanInfo, columns,
ScanType.USER_SCAN, Long.MAX_VALUE, HConstants.LATEST_TIMESTAMP,
oldestUnexpiredTS);
@@ -158,13 +159,13 @@ private StoreScanner(Store store, boolean cacheBlocks, Scan scan,
* @param smallestReadPoint the readPoint that we should use for tracking
* versions
*/
- StoreScanner(Store store, Scan scan,
+ public StoreScanner(Store store, ScanInfo scanInfo, Scan scan,
List<? extends KeyValueScanner> scanners, ScanType scanType,
long smallestReadPoint, long earliestPutTs) throws IOException {
- this(store, false, scan, null, store.scanInfo.getTtl(),
- store.scanInfo.getMinVersions());
+ this(store, false, scan, null, scanInfo.getTtl(),
+ scanInfo.getMinVersions());
initializeMetricNames();
- matcher = new ScanQueryMatcher(scan, store.scanInfo, null, scanType,
+ matcher = new ScanQueryMatcher(scan, scanInfo, null, scanType,
smallestReadPoint, earliestPutTs, oldestUnexpiredTS);
// Filter the list of scanners using Bloom filters, time range, TTL, etc.
@@ -181,7 +182,7 @@ private StoreScanner(Store store, boolean cacheBlocks, Scan scan,
/** Constructor for testing. */
StoreScanner(final Scan scan, Store.ScanInfo scanInfo,
- StoreScanner.ScanType scanType, final NavigableSet<byte[]> columns,
+ ScanType scanType, final NavigableSet<byte[]> columns,
final List<KeyValueScanner> scanners) throws IOException {
this(scan, scanInfo, scanType, columns, scanners,
HConstants.LATEST_TIMESTAMP);
@@ -189,7 +190,7 @@ private StoreScanner(Store store, boolean cacheBlocks, Scan scan,
// Constructor for testing.
StoreScanner(final Scan scan, Store.ScanInfo scanInfo,
- StoreScanner.ScanType scanType, final NavigableSet<byte[]> columns,
+ ScanType scanType, final NavigableSet<byte[]> columns,
final List<KeyValueScanner> scanners, long earliestPutTs)
throws IOException {
this(null, scan.getCacheBlocks(), scan, columns, scanInfo.getTtl(),
@@ -598,14 +599,5 @@ List<KeyValueScanner> getAllScannersForTesting() {
static void enableLazySeekGlobally(boolean enable) {
lazySeekEnabledGlobally = enable;
}
-
- /**
- * Enum to distinguish general scan types.
- */
- public static enum ScanType {
- MAJOR_COMPACT,
- MINOR_COMPACT,
- USER_SCAN
- }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index fc4fe2e4f6f7..767202e11677 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -788,6 +788,22 @@ public void flush(byte [] tableName) throws IOException {
this.hbaseCluster.flushcache(tableName);
}
+ /**
+ * Compact all regions in the mini hbase cluster
+ * @throws IOException
+ */
+ public void compact(boolean major) throws IOException {
+ this.hbaseCluster.compact(major);
+ }
+
+ /**
+ * Compact all of a table's reagion in the mini hbase cluster
+ * @throws IOException
+ */
+ public void compact(byte [] tableName, boolean major) throws IOException {
+ this.hbaseCluster.compact(tableName, major);
+ }
+
/**
* Create a table.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
index c7442ae57a29..e5743036730e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
@@ -454,6 +454,34 @@ public void flushcache(byte [] tableName) throws IOException {
}
}
+ /**
+ * Call flushCache on all regions on all participating regionservers.
+ * @throws IOException
+ */
+ public void compact(boolean major) throws IOException {
+ for (JVMClusterUtil.RegionServerThread t:
+ this.hbaseCluster.getRegionServers()) {
+ for(HRegion r: t.getRegionServer().getOnlineRegionsLocalContext()) {
+ r.compactStores(major);
+ }
+ }
+ }
+
+ /**
+ * Call flushCache on all regions of the specified table.
+ * @throws IOException
+ */
+ public void compact(byte [] tableName, boolean major) throws IOException {
+ for (JVMClusterUtil.RegionServerThread t:
+ this.hbaseCluster.getRegionServers()) {
+ for(HRegion r: t.getRegionServer().getOnlineRegionsLocalContext()) {
+ if(Bytes.equals(r.getTableDesc().getName(), tableName)) {
+ r.compactStores(major);
+ }
+ }
+ }
+ }
+
/**
* @return List of region server threads.
*/
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 2dadc7c93f2b..2b67c5daabd2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -90,12 +90,12 @@
@Category(LargeTests.class)
public class TestFromClientSide {
final Log LOG = LogFactory.getLog(getClass());
- private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+ protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte [] ROW = Bytes.toBytes("testRow");
private static byte [] FAMILY = Bytes.toBytes("testFamily");
private static byte [] QUALIFIER = Bytes.toBytes("testQualifier");
private static byte [] VALUE = Bytes.toBytes("testValue");
- private static int SLAVES = 3;
+ protected static int SLAVES = 3;
/**
* @throws java.lang.Exception
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
new file mode 100644
index 000000000000..7b313dc67fac
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.LargeTests;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
+import org.apache.hadoop.hbase.regionserver.NoOpScanPolicyObserver;
+import org.junit.BeforeClass;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Test all client operations with a coprocessor that
+ * just implements the default flush/compact/scan policy
+ */
+@Category(LargeTests.class)
+public class TestFromClientSideWithCoprocessor extends TestFromClientSide {
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ Configuration conf = TEST_UTIL.getConfiguration();
+ conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+ MultiRowMutationEndpoint.class.getName(), NoOpScanPolicyObserver.class.getName());
+ // We need more than one region server in this test
+ TEST_UTIL.startMiniCluster(SLAVES);
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
index a691bacc4366..119a4878e985 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
@@ -29,6 +29,7 @@
import java.util.List;
import java.util.Map;
import java.util.Arrays;
+import java.util.NavigableSet;
import com.google.common.collect.ImmutableList;
import org.apache.commons.logging.Log;
@@ -42,7 +43,9 @@
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
@@ -63,11 +66,13 @@ public class SimpleRegionObserver extends BaseRegionObserver {
boolean hadPreClose;
boolean hadPostClose;
boolean hadPreFlush;
+ boolean hadPreFlushScannerOpen;
boolean hadPostFlush;
boolean hadPreSplit;
boolean hadPostSplit;
boolean hadPreCompactSelect;
boolean hadPostCompactSelect;
+ boolean hadPreCompactScanner;
boolean hadPreCompact;
boolean hadPostCompact;
boolean hadPreGet = false;
@@ -87,6 +92,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
boolean hadPreScannerClose = false;
boolean hadPostScannerClose = false;
boolean hadPreScannerOpen = false;
+ boolean hadPreStoreScannerOpen = false;
boolean hadPostScannerOpen = false;
boolean hadPreBulkLoadHFile = false;
boolean hadPostBulkLoadHFile = false;
@@ -120,12 +126,20 @@ public boolean wasClosed() {
}
@Override
- public void preFlush(ObserverContext<RegionCoprocessorEnvironment> c) {
+ public InternalScanner preFlush(ObserverContext<RegionCoprocessorEnvironment> c, Store store, InternalScanner scanner) {
hadPreFlush = true;
+ return scanner;
}
@Override
- public void postFlush(ObserverContext<RegionCoprocessorEnvironment> c) {
+ public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
+ hadPreFlushScannerOpen = true;
+ return null;
+ }
+
+ @Override
+ public void postFlush(ObserverContext<RegionCoprocessorEnvironment> c, Store store, StoreFile resultFile) {
hadPostFlush = true;
}
@@ -166,6 +180,14 @@ public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment>
return scanner;
}
+ @Override
+ public InternalScanner preCompactScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ Store store, List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
+ InternalScanner s) throws IOException {
+ hadPreCompactScanner = true;
+ return null;
+ }
+
@Override
public void postCompact(ObserverContext<RegionCoprocessorEnvironment> e,
Store store, StoreFile resultFile) {
@@ -184,6 +206,14 @@ public RegionScanner preScannerOpen(final ObserverContext<RegionCoprocessorEnvir
return null;
}
+ @Override
+ public KeyValueScanner preStoreScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ final Store store, final Scan scan, final NavigableSet<byte[]> targetCols,
+ final KeyValueScanner s) throws IOException {
+ hadPreStoreScannerOpen = true;
+ return null;
+ }
+
@Override
public RegionScanner postScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
final Scan scan, final RegionScanner s)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
index ebc5373e2248..e6ff17305eca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
@@ -61,7 +61,6 @@
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
import org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter;
import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
-import org.apache.hadoop.hbase.regionserver.StoreScanner.ScanType;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LoadTestTool;
import org.apache.hadoop.hbase.util.MD5Hash;
@@ -408,7 +407,7 @@ private void performMerge(List<StoreFileScanner> scanners, Store store,
Scan scan = new Scan();
// Include deletes
- scanner = new StoreScanner(store, scan, scanners,
+ scanner = new StoreScanner(store, store.scanInfo, scan, scanners,
ScanType.MAJOR_COMPACT, Long.MIN_VALUE, Long.MIN_VALUE);
ArrayList<KeyValue> kvs = new ArrayList<KeyValue>();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
new file mode 100644
index 000000000000..668c04372c20
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
@@ -0,0 +1,62 @@
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.NavigableSet;
+
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.TestFromClientSideWithCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+
+/**
+ * RegionObserver that just reimplements the default behavior,
+ * in order to validate that all the necessary APIs for this are public
+ * This observer is also used in {@link TestFromClientSideWithCoprocessor} and
+ * {@link TestCompactionWithCoprocessor} to make sure that a wide range
+ * of functionality still behaves as expected.
+ */
+public class NoOpScanPolicyObserver extends BaseRegionObserver {
+ /**
+ * Reimplement the default behavior
+ */
+ @Override
+ public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
+ Store.ScanInfo oldSI = store.getScanInfo();
+ Store.ScanInfo scanInfo = new Store.ScanInfo(store.getFamily(), oldSI.getTtl(),
+ oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
+ Scan scan = new Scan();
+ scan.setMaxVersions(oldSI.getMaxVersions());
+ return new StoreScanner(store, scanInfo, scan, Collections.singletonList(memstoreScanner),
+ ScanType.MINOR_COMPACT, store.getHRegion().getSmallestReadPoint(),
+ HConstants.OLDEST_TIMESTAMP);
+ }
+
+ /**
+ * Reimplement the default behavior
+ */
+ @Override
+ public InternalScanner preCompactScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ Store store, List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
+ InternalScanner s) throws IOException {
+ // this demonstrates how to override the scanners default behavior
+ Store.ScanInfo oldSI = store.getScanInfo();
+ Store.ScanInfo scanInfo = new Store.ScanInfo(store.getFamily(), oldSI.getTtl(),
+ oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
+ Scan scan = new Scan();
+ scan.setMaxVersions(oldSI.getMaxVersions());
+ return new StoreScanner(store, scanInfo, scan, scanners, scanType, store.getHRegion()
+ .getSmallestReadPoint(), earliestPutTs);
+ }
+
+ @Override
+ public KeyValueScanner preStoreScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ Store store, final Scan scan, final NavigableSet<byte[]> targetCols, KeyValueScanner s)
+ throws IOException {
+ return new StoreScanner(store, store.getScanInfo(), scan, targetCols);
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionWithCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionWithCoprocessor.java
new file mode 100644
index 000000000000..ba30a9fdf388
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionWithCoprocessor.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Make sure all compaction tests still pass with the preFlush and preCompact
+ * overridden to implement the default behavior
+ */
+@Category(MediumTests.class)
+public class TestCompactionWithCoprocessor extends TestCompaction {
+ /** constructor */
+ public TestCompactionWithCoprocessor() throws Exception {
+ super();
+ conf.setStrings(CoprocessorHost.USER_REGION_COPROCESSOR_CONF_KEY,
+ NoOpScanPolicyObserver.class.getName());
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
index 8e8ae45a5a7f..0da62dfc17c4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
@@ -36,7 +36,6 @@
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.Store.ScanInfo;
-import org.apache.hadoop.hbase.regionserver.StoreScanner.ScanType;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
import org.apache.hadoop.hbase.util.Bytes;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
index 3c582338e69b..01f0731549f2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
@@ -38,7 +38,6 @@
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.Store.ScanInfo;
-import org.apache.hadoop.hbase.regionserver.StoreScanner.ScanType;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdge;
@@ -559,7 +558,7 @@ public long currentTimeMillis() {
KeyValue.COMPARATOR);
StoreScanner scanner =
new StoreScanner(scan, scanInfo,
- StoreScanner.ScanType.MAJOR_COMPACT, null, scanners,
+ ScanType.MAJOR_COMPACT, null, scanners,
HConstants.OLDEST_TIMESTAMP);
List<KeyValue> results = new ArrayList<KeyValue>();
results = new ArrayList<KeyValue>();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
new file mode 100644
index 000000000000..1915ca372762
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
@@ -0,0 +1,262 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+// this is deliberately not in the o.a.h.h.regionserver package
+// in order to make sure all required classes/method are available
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
+import org.apache.hadoop.hbase.regionserver.ScanType;
+import org.apache.hadoop.hbase.regionserver.Store;
+import org.apache.hadoop.hbase.regionserver.StoreScanner;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import static org.junit.Assert.*;
+
+@Category(MediumTests.class)
+public class TestCoprocessorScanPolicy {
+ final Log LOG = LogFactory.getLog(getClass());
+ protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+ private static final byte[] F = Bytes.toBytes("fam");
+ private static final byte[] Q = Bytes.toBytes("qual");
+ private static final byte[] R = Bytes.toBytes("row");
+
+
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ Configuration conf = TEST_UTIL.getConfiguration();
+ conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+ ScanObserver.class.getName());
+ TEST_UTIL.startMiniCluster();
+ }
+
+ @AfterClass
+ public static void tearDownAfterClass() throws Exception {
+ TEST_UTIL.shutdownMiniCluster();
+ }
+
+ @Test
+ public void testBaseCases() throws Exception {
+ byte[] tableName = Bytes.toBytes("baseCases");
+ HTable t = TEST_UTIL.createTable(tableName, F, 1);
+ // set the version override to 2
+ Put p = new Put(R);
+ p.setAttribute("versions", new byte[]{});
+ p.add(F, tableName, Bytes.toBytes(2));
+ t.put(p);
+
+ // insert 2 versions
+ p = new Put(R);
+ p.add(F, Q, Q);
+ t.put(p);
+ p = new Put(R);
+ p.add(F, Q, Q);
+ t.put(p);
+ Get g = new Get(R);
+ g.setMaxVersions(10);
+ Result r = t.get(g);
+ assertEquals(2, r.size());
+
+ TEST_UTIL.flush(tableName);
+ TEST_UTIL.compact(tableName, true);
+
+ // both version are still visible even after a flush/compaction
+ g = new Get(R);
+ g.setMaxVersions(10);
+ r = t.get(g);
+ assertEquals(2, r.size());
+
+ // insert a 3rd version
+ p = new Put(R);
+ p.add(F, Q, Q);
+ t.put(p);
+ g = new Get(R);
+ g.setMaxVersions(10);
+ r = t.get(g);
+ // still only two version visible
+ assertEquals(2, r.size());
+
+ t.close();
+ }
+
+ @Test
+ public void testTTL() throws Exception {
+ byte[] tableName = Bytes.toBytes("testTTL");
+ HTableDescriptor desc = new HTableDescriptor(tableName);
+ HColumnDescriptor hcd = new HColumnDescriptor(F)
+ .setMaxVersions(10)
+ .setTimeToLive(1);
+ desc.addFamily(hcd);
+ TEST_UTIL.getHBaseAdmin().createTable(desc);
+ HTable t = new HTable(new Configuration(TEST_UTIL.getConfiguration()), tableName);
+ long now = EnvironmentEdgeManager.currentTimeMillis();
+ ManualEnvironmentEdge me = new ManualEnvironmentEdge();
+ me.setValue(now);
+ EnvironmentEdgeManagerTestHelper.injectEdge(me);
+ // 2s in the past
+ long ts = now - 2000;
+ // Set the TTL override to 3s
+ Put p = new Put(R);
+ p.setAttribute("ttl", new byte[]{});
+ p.add(F, tableName, Bytes.toBytes(3000L));
+ t.put(p);
+
+ p = new Put(R);
+ p.add(F, Q, ts, Q);
+ t.put(p);
+ p = new Put(R);
+ p.add(F, Q, ts+1, Q);
+ t.put(p);
+
+ // these two should be expired but for the override
+ // (their ts was 2s in the past)
+ Get g = new Get(R);
+ g.setMaxVersions(10);
+ Result r = t.get(g);
+ // still there?
+ assertEquals(2, r.size());
+
+ TEST_UTIL.flush(tableName);
+ TEST_UTIL.compact(tableName, true);
+
+ g = new Get(R);
+ g.setMaxVersions(10);
+ r = t.get(g);
+ // still there?
+ assertEquals(2, r.size());
+
+ // roll time forward 2s.
+ me.setValue(now + 2000);
+ // now verify that data eventually does expire
+ g = new Get(R);
+ g.setMaxVersions(10);
+ r = t.get(g);
+ // should be gone now
+ assertEquals(0, r.size());
+ t.close();
+ }
+
+ public static class ScanObserver extends BaseRegionObserver {
+ private Map<String, Long> ttls = new HashMap<String,Long>();
+ private Map<String, Integer> versions = new HashMap<String,Integer>();
+
+ // lame way to communicate with the coprocessor,
+ // since it is loaded by a different class loader
+ @Override
+ public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c, final Put put,
+ final WALEdit edit, final boolean writeToWAL) throws IOException {
+ if (put.getAttribute("ttl") != null) {
+ KeyValue kv = put.getFamilyMap().values().iterator().next().get(0);
+ ttls.put(Bytes.toString(kv.getQualifier()), Bytes.toLong(kv.getValue()));
+ c.bypass();
+ } else if (put.getAttribute("versions") != null) {
+ KeyValue kv = put.getFamilyMap().values().iterator().next().get(0);
+ versions.put(Bytes.toString(kv.getQualifier()), Bytes.toInt(kv.getValue()));
+ c.bypass();
+ }
+ }
+
+ @Override
+ public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
+ Long newTtl = ttls.get(store.getTableName());
+ if (newTtl != null) {
+ System.out.println("PreFlush:" + newTtl);
+ }
+ Integer newVersions = versions.get(store.getTableName());
+ Store.ScanInfo oldSI = store.getScanInfo();
+ HColumnDescriptor family = store.getFamily();
+ Store.ScanInfo scanInfo = new Store.ScanInfo(family.getName(), family.getMinVersions(),
+ newVersions == null ? family.getMaxVersions() : newVersions,
+ newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
+ oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
+ Scan scan = new Scan();
+ scan.setMaxVersions(newVersions == null ? oldSI.getMaxVersions() : newVersions);
+ return new StoreScanner(store, scanInfo, scan, Collections.singletonList(memstoreScanner),
+ ScanType.MINOR_COMPACT, store.getHRegion().getSmallestReadPoint(),
+ HConstants.OLDEST_TIMESTAMP);
+ }
+
+ @Override
+ public InternalScanner preCompactScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
+ Store store, List<? extends KeyValueScanner> scanners, ScanType scanType,
+ long earliestPutTs, InternalScanner s) throws IOException {
+ Long newTtl = ttls.get(store.getTableName());
+ Integer newVersions = versions.get(store.getTableName());
+ Store.ScanInfo oldSI = store.getScanInfo();
+ HColumnDescriptor family = store.getFamily();
+ Store.ScanInfo scanInfo = new Store.ScanInfo(family.getName(), family.getMinVersions(),
+ newVersions == null ? family.getMaxVersions() : newVersions,
+ newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
+ oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
+ Scan scan = new Scan();
+ scan.setMaxVersions(newVersions == null ? oldSI.getMaxVersions() : newVersions);
+ return new StoreScanner(store, scanInfo, scan, scanners, scanType, store.getHRegion()
+ .getSmallestReadPoint(), earliestPutTs);
+ }
+
+ @Override
+ public KeyValueScanner preStoreScannerOpen(
+ final ObserverContext<RegionCoprocessorEnvironment> c, Store store, final Scan scan,
+ final NavigableSet<byte[]> targetCols, KeyValueScanner s) throws IOException {
+ Long newTtl = ttls.get(store.getTableName());
+ Integer newVersions = versions.get(store.getTableName());
+ Store.ScanInfo oldSI = store.getScanInfo();
+ HColumnDescriptor family = store.getFamily();
+ Store.ScanInfo scanInfo = new Store.ScanInfo(family.getName(), family.getMinVersions(),
+ newVersions == null ? family.getMaxVersions() : newVersions,
+ newTtl == null ? oldSI.getTtl() : newTtl, family.getKeepDeletedCells(),
+ oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
+ return new StoreScanner(store, scanInfo, scan, targetCols);
+ }
+ }
+
+ @org.junit.Rule
+ public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
+ new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
+}
|
135ec78d7d5854af7e5a764d4c3bb50ccf188eeb
|
kotlin
|
Fixed EA-70945--
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/idea-completion/testData/smart/EA70945.kt b/idea/idea-completion/testData/smart/EA70945.kt
new file mode 100644
index 0000000000000..1673544f07d95
--- /dev/null
+++ b/idea/idea-completion/testData/smart/EA70945.kt
@@ -0,0 +1,9 @@
+class A {
+ val foo: Int = 0
+}
+
+fun f() {
+ A().foo(<caret>)
+}
+
+// NUMBER: 0
diff --git a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmSmartCompletionTestGenerated.java b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmSmartCompletionTestGenerated.java
index 0ebb61cdc50b5..c73e6aba16076 100644
--- a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmSmartCompletionTestGenerated.java
+++ b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmSmartCompletionTestGenerated.java
@@ -113,6 +113,12 @@ public void testClassObjectMembersWithPrefix() throws Exception {
doTest(fileName);
}
+ @TestMetadata("EA70945.kt")
+ public void testEA70945() throws Exception {
+ String fileName = KotlinTestUtils.navigationMetadata("idea/idea-completion/testData/smart/EA70945.kt");
+ doTest(fileName);
+ }
+
@TestMetadata("EmptyPrefix.kt")
public void testEmptyPrefix() throws Exception {
String fileName = KotlinTestUtils.navigationMetadata("idea/idea-completion/testData/smart/EmptyPrefix.kt");
diff --git a/idea/idea-core/src/org/jetbrains/kotlin/idea/core/ExpectedInfos.kt b/idea/idea-core/src/org/jetbrains/kotlin/idea/core/ExpectedInfos.kt
index bf4d0c2db3499..53c7991390502 100644
--- a/idea/idea-core/src/org/jetbrains/kotlin/idea/core/ExpectedInfos.kt
+++ b/idea/idea-core/src/org/jetbrains/kotlin/idea/core/ExpectedInfos.kt
@@ -188,6 +188,8 @@ class ExpectedInfos(
private fun calculateForArgument(callElement: KtCallElement, argument: ValueArgument): Collection<ExpectedInfo>? {
val call = callElement.getCall(bindingContext) ?: return null
+ // sometimes we get wrong call (see testEA70945) TODO: refactor resolve so that it does not happen
+ if (call.callElement != callElement) return null
return calculateForArgument(call, argument)
}
|
d47f33629bcc15db2d0f9d75289b0d0da4850799
|
drools
|
add support for maven version ranges--
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieRepositoryImpl.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieRepositoryImpl.java
index 5d61f05c047..418d8c0f14a 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieRepositoryImpl.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieRepositoryImpl.java
@@ -14,9 +14,19 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.math.BigInteger;
import java.net.URL;
+import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Locale;
import java.util.Map;
+import java.util.Properties;
+import java.util.Stack;
+import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicReference;
public class KieRepositoryImpl
@@ -30,7 +40,7 @@ public class KieRepositoryImpl
static final KieRepositoryImpl INSTANCE = new KieRepositoryImpl();
- private final Map<GAV, KieModule> kieModules = new HashMap<GAV, KieModule>();
+ private final KieModuleRepo kieModuleRepo = new KieModuleRepo();
private final AtomicReference<GAV> defaultGAV = new AtomicReference( new GAVImpl( DEFAULT_GROUP,
DEFAULT_ARTIFACT,
@@ -47,8 +57,7 @@ public GAV getDefaultGAV() {
}
public void addKieModule(KieModule kieModule) {
- kieModules.put(kieModule.getGAV(),
- kieModule);
+ kieModuleRepo.store(kieModule);
log.info( "KieModule was added:" + kieModule);
}
@@ -57,7 +66,9 @@ public Results verfyKieModule(GAV gav) {
}
public KieModule getKieModule(GAV gav) {
- KieModule kieModule = kieModules.get( gav );
+ VersionRange versionRange = new VersionRange(gav.getVersion());
+
+ KieModule kieModule = kieModuleRepo.load(gav, versionRange);
if ( kieModule == null ) {
log.debug( "KieModule Lookup. GAV {} was not in cache, checking classpath",
gav.toExternalForm() );
@@ -164,5 +175,441 @@ public KieModule getKieModule(Resource resource) {
throw new RuntimeException("Unable to fetch module from resource :" + res, e);
}
}
-
+
+ private static class KieModuleRepo {
+ private final Map<String, TreeMap<ComparableVersion, KieModule>> kieModules = new HashMap<String, TreeMap<ComparableVersion, KieModule>>();
+
+ void store(KieModule kieModule) {
+ GAV gav = kieModule.getGAV();
+ String ga = gav.getGroupId() + ":" + gav.getArtifactId();
+
+ TreeMap<ComparableVersion, KieModule> artifactMap = kieModules.get(ga);
+ if (artifactMap == null) {
+ artifactMap = new TreeMap<ComparableVersion, KieModule>();
+ kieModules.put(ga, artifactMap);
+ }
+ artifactMap.put(new ComparableVersion(gav.getVersion()), kieModule);
+ }
+
+ KieModule load(GAV gav, VersionRange versionRange) {
+ String ga = gav.getGroupId() + ":" + gav.getArtifactId();
+ TreeMap<ComparableVersion, KieModule> artifactMap = kieModules.get(ga);
+ if (artifactMap == null) {
+ return null;
+ }
+
+ if (versionRange.fixed) {
+ return artifactMap.get(new ComparableVersion(gav.getVersion()));
+ }
+
+ if (versionRange.upperBound == null) {
+ return artifactMap.lastEntry().getValue();
+ }
+
+ Map.Entry<ComparableVersion, KieModule> entry = versionRange.upperInclusive ?
+ artifactMap.ceilingEntry(new ComparableVersion(versionRange.upperBound)) :
+ artifactMap.lowerEntry(new ComparableVersion(versionRange.upperBound));
+
+ if (entry == null) {
+ return null;
+ }
+
+ if (versionRange.lowerBound == null) {
+ return entry.getValue();
+ }
+
+ int versionComparison = entry.getKey().compareTo(new ComparableVersion(versionRange.lowerBound));
+ return versionComparison > 0 || (versionComparison == 0 && versionRange.lowerInclusive) ? entry.getValue() : null;
+ }
+ }
+
+ private static class VersionRange {
+ private String lowerBound;
+ private String upperBound;
+ private boolean lowerInclusive;
+ private boolean upperInclusive;
+ private boolean fixed;
+
+ private VersionRange(String version) {
+ parse(version);
+ }
+
+ private void parse(String version) {
+ if ("LATEST".equals(version) || "RELEASE".equals(version)) {
+ fixed = false;
+ lowerBound = "1.0";
+ upperBound = null;
+ lowerInclusive = true;
+ upperInclusive = false;
+ return;
+ }
+
+ if (version.charAt(0) != '(' && version.charAt(0) != '[') {
+ fixed = true;
+ lowerBound = version;
+ upperBound = version;
+ lowerInclusive = true;
+ upperInclusive = true;
+ return;
+ }
+
+ lowerInclusive = version.charAt(0) == '[';
+ upperInclusive = version.charAt(version.length()-1) == ']';
+
+ int commaPos = version.indexOf(',');
+ if (commaPos < 0) {
+ fixed = true;
+ lowerBound = version.substring(1, version.length() - 1);
+ upperBound = lowerBound;
+ } else {
+ if (commaPos > 1) {
+ lowerBound = version.substring(1, commaPos);
+ }
+ if (commaPos < version.length()-2) {
+ upperBound = version.substring(commaPos + 1, version.length() - 1);
+ }
+ }
+ }
+ }
+
+ public static class ComparableVersion implements Comparable<ComparableVersion> {
+ private String value;
+
+ private String canonical;
+
+ private ListItem items;
+
+ private interface Item {
+ final int INTEGER_ITEM = 0;
+ final int STRING_ITEM = 1;
+ final int LIST_ITEM = 2;
+
+ int compareTo( Item item );
+
+ int getType();
+
+ boolean isNull();
+ }
+
+ private static class IntegerItem implements Item {
+ private static final BigInteger BigInteger_ZERO = new BigInteger( "0" );
+
+ private final BigInteger value;
+
+ public static final IntegerItem ZERO = new IntegerItem();
+
+ private IntegerItem() {
+ this.value = BigInteger_ZERO;
+ }
+
+ public IntegerItem( String str ) {
+ this.value = new BigInteger( str );
+ }
+
+ public int getType() {
+ return INTEGER_ITEM;
+ }
+
+ public boolean isNull() {
+ return BigInteger_ZERO.equals( value );
+ }
+
+ public int compareTo( Item item ) {
+ if ( item == null )
+ {
+ return BigInteger_ZERO.equals( value ) ? 0 : 1; // 1.0 == 1, 1.1 > 1
+ }
+
+ switch ( item.getType() )
+ {
+ case INTEGER_ITEM:
+ return value.compareTo( ( (IntegerItem) item ).value );
+
+ case STRING_ITEM:
+ return 1; // 1.1 > 1-sp
+
+ case LIST_ITEM:
+ return 1; // 1.1 > 1-1
+
+ default:
+ throw new RuntimeException( "invalid item: " + item.getClass() );
+ }
+ }
+
+ public String toString() {
+ return value.toString();
+ }
+ }
+
+ /**
+ * Represents a string in the version item list, usually a qualifier.
+ */
+ private static class StringItem implements Item {
+ private static final String[] QUALIFIERS = { "alpha", "beta", "milestone", "rc", "snapshot", "", "sp" };
+
+ private static final List<String> _QUALIFIERS = Arrays.asList(QUALIFIERS);
+
+ private static final Properties ALIASES = new Properties();
+
+ static {
+ ALIASES.put( "ga", "" );
+ ALIASES.put( "final", "" );
+ ALIASES.put( "cr", "rc" );
+ }
+
+ /**
+ * A comparable value for the empty-string qualifier. This one is used to determine if a given qualifier makes
+ * the version older than one without a qualifier, or more recent.
+ */
+ private static final String RELEASE_VERSION_INDEX = String.valueOf( _QUALIFIERS.indexOf( "" ) );
+
+ private String value;
+
+ public StringItem( String value, boolean followedByDigit ) {
+ if ( followedByDigit && value.length() == 1 ) {
+ // a1 = alpha-1, b1 = beta-1, m1 = milestone-1
+ switch ( value.charAt( 0 ) ) {
+ case 'a':
+ value = "alpha";
+ break;
+ case 'b':
+ value = "beta";
+ break;
+ case 'm':
+ value = "milestone";
+ break;
+ }
+ }
+ this.value = ALIASES.getProperty( value , value );
+ }
+
+ public int getType() {
+ return STRING_ITEM;
+ }
+
+ public boolean isNull() {
+ return ( comparableQualifier( value ).compareTo( RELEASE_VERSION_INDEX ) == 0 );
+ }
+
+ /**
+ * Returns a comparable value for a qualifier.
+ *
+ * This method both takes into account the ordering of known qualifiers as well as lexical ordering for unknown
+ * qualifiers.
+ *
+ * just returning an Integer with the index here is faster, but requires a lot of if/then/else to check for -1
+ * or QUALIFIERS.size and then resort to lexical ordering. Most comparisons are decided by the first character,
+ * so this is still fast. If more characters are needed then it requires a lexical sort anyway.
+ *
+ * @param qualifier
+ * @return an equivalent value that can be used with lexical comparison
+ */
+ public static String comparableQualifier( String qualifier ) {
+ int i = _QUALIFIERS.indexOf( qualifier );
+
+ return i == -1 ? _QUALIFIERS.size() + "-" + qualifier : String.valueOf( i );
+ }
+
+ public int compareTo( Item item ) {
+ if ( item == null ) {
+ // 1-rc < 1, 1-ga > 1
+ return comparableQualifier( value ).compareTo( RELEASE_VERSION_INDEX );
+ }
+ switch ( item.getType() ) {
+ case INTEGER_ITEM:
+ return -1; // 1.any < 1.1 ?
+
+ case STRING_ITEM:
+ return comparableQualifier( value ).compareTo( comparableQualifier( ( (StringItem) item ).value ) );
+
+ case LIST_ITEM:
+ return -1; // 1.any < 1-1
+
+ default:
+ throw new RuntimeException( "invalid item: " + item.getClass() );
+ }
+ }
+
+ public String toString() {
+ return value;
+ }
+ }
+
+ /**
+ * Represents a version list item. This class is used both for the global item list and for sub-lists (which start
+ * with '-(number)' in the version specification).
+ */
+ private static class ListItem extends ArrayList<Item> implements Item {
+ public int getType() {
+ return LIST_ITEM;
+ }
+
+ public boolean isNull() {
+ return ( size() == 0 );
+ }
+
+ void normalize() {
+ for( ListIterator<Item> iterator = listIterator( size() ); iterator.hasPrevious(); ) {
+ Item item = iterator.previous();
+ if ( item.isNull() ) {
+ iterator.remove(); // remove null trailing items: 0, "", empty list
+ } else {
+ break;
+ }
+ }
+ }
+
+ public int compareTo( Item item ) {
+ if ( item == null ) {
+ if ( size() == 0 ) {
+ return 0; // 1-0 = 1- (normalize) = 1
+ }
+ Item first = get( 0 );
+ return first.compareTo( null );
+ }
+ switch ( item.getType() ) {
+ case INTEGER_ITEM:
+ return -1; // 1-1 < 1.0.x
+
+ case STRING_ITEM:
+ return 1; // 1-1 > 1-sp
+
+ case LIST_ITEM:
+ Iterator<Item> left = iterator();
+ Iterator<Item> right = ( (ListItem) item ).iterator();
+
+ while ( left.hasNext() || right.hasNext() ) {
+ Item l = left.hasNext() ? left.next() : null;
+ Item r = right.hasNext() ? right.next() : null;
+
+ // if this is shorter, then invert the compare and mul with -1
+ int result = l == null ? -1 * r.compareTo( l ) : l.compareTo( r );
+
+ if ( result != 0 ) {
+ return result;
+ }
+ }
+
+ return 0;
+
+ default:
+ throw new RuntimeException( "invalid item: " + item.getClass() );
+ }
+ }
+
+ public String toString() {
+ StringBuilder buffer = new StringBuilder( "(" );
+ for( Iterator<Item> iter = iterator(); iter.hasNext(); )
+ {
+ buffer.append( iter.next() );
+ if ( iter.hasNext() )
+ {
+ buffer.append( ',' );
+ }
+ }
+ buffer.append( ')' );
+ return buffer.toString();
+ }
+ }
+
+ public ComparableVersion( String version ) {
+ parseVersion( version );
+ }
+
+ public final void parseVersion( String version ) {
+ this.value = version;
+
+ items = new ListItem();
+
+ version = version.toLowerCase( Locale.ENGLISH );
+
+ ListItem list = items;
+
+ Stack<Item> stack = new Stack<Item>();
+ stack.push( list );
+
+ boolean isDigit = false;
+
+ int startIndex = 0;
+
+ for ( int i = 0; i < version.length(); i++ ) {
+ char c = version.charAt( i );
+
+ if ( c == '.' ) {
+ if ( i == startIndex ) {
+ list.add( IntegerItem.ZERO );
+ } else {
+ list.add( parseItem( isDigit, version.substring( startIndex, i ) ) );
+ }
+ startIndex = i + 1;
+ } else if ( c == '-' ) {
+ if ( i == startIndex ) {
+ list.add( IntegerItem.ZERO );
+ } else {
+ list.add( parseItem( isDigit, version.substring( startIndex, i ) ) );
+ }
+ startIndex = i + 1;
+
+ if ( isDigit ) {
+ list.normalize(); // 1.0-* = 1-*
+
+ if ( ( i + 1 < version.length() ) && Character.isDigit( version.charAt( i + 1 ) ) ) {
+ // new ListItem only if previous were digits and new char is a digit,
+ // ie need to differentiate only 1.1 from 1-1
+ list.add( list = new ListItem() );
+
+ stack.push( list );
+ }
+ }
+ }
+ else if ( Character.isDigit( c ) ) {
+ if ( !isDigit && i > startIndex ) {
+ list.add( new StringItem( version.substring( startIndex, i ), true ) );
+ startIndex = i;
+ }
+
+ isDigit = true;
+ } else {
+ if ( isDigit && i > startIndex ) {
+ list.add( parseItem( true, version.substring( startIndex, i ) ) );
+ startIndex = i;
+ }
+
+ isDigit = false;
+ }
+ }
+
+ if ( version.length() > startIndex ) {
+ list.add( parseItem( isDigit, version.substring( startIndex ) ) );
+ }
+
+ while ( !stack.isEmpty() ) {
+ list = (ListItem) stack.pop();
+ list.normalize();
+ }
+
+ canonical = items.toString();
+ }
+
+ private static Item parseItem( boolean isDigit, String buf ) {
+ return isDigit ? new IntegerItem( buf ) : new StringItem( buf, false );
+ }
+
+ public int compareTo( ComparableVersion o ) {
+ return items.compareTo( o.items );
+ }
+
+ public String toString() {
+ return value;
+ }
+
+ public boolean equals( Object o ) {
+ return ( o instanceof ComparableVersion ) && canonical.equals( ( (ComparableVersion) o ).canonical );
+ }
+
+ public int hashCode() {
+ return canonical.hashCode();
+ }
+ }
}
+
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java
index 8024bf64c42..66a7de75d5f 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java
@@ -1,9 +1,5 @@
package org.kie.builder.impl;
-import static org.drools.compiler.io.memory.MemoryFileSystem.readFromJar;
-
-import java.io.File;
-
import org.drools.audit.KnowledgeRuntimeLoggerProviderImpl;
import org.drools.command.impl.CommandFactoryServiceImpl;
import org.drools.concurrent.ExecutorProviderImpl;
@@ -25,6 +21,10 @@
import org.kie.persistence.jpa.KieStoreServices;
import org.kie.util.ServiceRegistryImpl;
+import java.io.File;
+
+import static org.drools.compiler.io.memory.MemoryFileSystem.readFromJar;
+
public class KieServicesImpl implements KieServices {
private ResourceFactoryService resourceFactory;
@@ -60,7 +60,11 @@ public KieContainer getKieClasspathContainer() {
}
public KieContainer getKieContainer(GAV gav) {
- KieProject kProject = new KieModuleKieProject( ( InternalKieModule ) getKieRepository().getKieModule(gav), getKieRepository() );
+ InternalKieModule kieModule = (InternalKieModule)getKieRepository().getKieModule(gav);
+ if (kieModule == null) {
+ throw new RuntimeException("Cannot find KieModule: " + gav);
+ }
+ KieProject kProject = new KieModuleKieProject( kieModule, getKieRepository() );
return new KieContainerImpl( kProject, getKieRepository() );
}
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java
index 09a22735675..5e8ae855efc 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java
@@ -84,7 +84,7 @@ public void testHelloWorldWithPackages() throws Exception {
.generateAndWritePomXML( gav )
.write("src/main/resources/KBase1/org/pkg1/r1.drl", drl1)
.write("src/main/resources/KBase1/org/pkg2/r2.drl", drl2)
- .writeKModuleXML( createKieProjectWithPackages(kf, "org.pkg1").toXML());
+ .writeKModuleXML(createKieProjectWithPackages(kf, "org.pkg1").toXML());
ks.newKieBuilder( kfs ).build();
KieSession ksession = ks.getKieContainer(gav).getKieSession("KSession1");
@@ -139,8 +139,64 @@ private KieModuleModel createKieProjectWithPackages(KieFactory kf, String pkg) {
KieSessionModel ksession1 = kieBaseModel1.newKieSessionModel("KSession1")
.setType( KieSessionType.STATEFUL )
- .setClockType( ClockTypeOption.get("realtime") );
+ .setClockType(ClockTypeOption.get("realtime"));
return kproj;
}
-}
+
+ @Test
+ public void testHelloWorldOnVersionRange() throws Exception {
+ KieServices ks = KieServices.Factory.get();
+ KieFactory kf = KieFactory.Factory.get();
+
+ buildVersion(ks, kf, "Hello World", "1.0");
+ buildVersion(ks, kf, "Aloha Earth", "1.1");
+ buildVersion(ks, kf, "Hi Universe", "1.2");
+
+ GAV latestGav = kf.newGav("org.kie", "hello-world", "LATEST");
+
+ KieSession ksession = ks.getKieContainer(latestGav).getKieSession("KSession1");
+ ksession.insert(new Message("Hello World"));
+ assertEquals( 0, ksession.fireAllRules() );
+
+ ksession = ks.getKieContainer(latestGav).getKieSession("KSession1");
+ ksession.insert(new Message("Hi Universe"));
+ assertEquals( 1, ksession.fireAllRules() );
+
+ GAV gav1 = kf.newGav("org.kie", "hello-world", "1.0");
+
+ ksession = ks.getKieContainer(gav1).getKieSession("KSession1");
+ ksession.insert(new Message("Hello World"));
+ assertEquals( 1, ksession.fireAllRules() );
+
+ ksession = ks.getKieContainer(gav1).getKieSession("KSession1");
+ ksession.insert(new Message("Hi Universe"));
+ assertEquals( 0, ksession.fireAllRules() );
+
+ GAV gav2 = kf.newGav("org.kie", "hello-world", "[1.0,1.2)");
+
+ ksession = ks.getKieContainer(gav2).getKieSession("KSession1");
+ ksession.insert(new Message("Aloha Earth"));
+ assertEquals( 1, ksession.fireAllRules() );
+
+ ksession = ks.getKieContainer(gav2).getKieSession("KSession1");
+ ksession.insert(new Message("Hi Universe"));
+ assertEquals( 0, ksession.fireAllRules() );
+ }
+
+ private void buildVersion(KieServices ks, KieFactory kf, String message, String version) {
+ String drl = "package org.drools\n" +
+ "rule R1 when\n" +
+ " $m : Message( message == \"" + message+ "\" )\n" +
+ "then\n" +
+ "end\n";
+
+ GAV gav = kf.newGav("org.kie", "hello-world", version);
+
+ KieFileSystem kfs = kf.newKieFileSystem()
+ .generateAndWritePomXML( gav )
+ .write("src/main/resources/KBase1/org/pkg1/r1.drl", drl)
+ .writeKModuleXML(createKieProjectWithPackages(kf, "*").toXML());
+ ks.newKieBuilder( kfs ).build();
+ }
+}
\ No newline at end of file
diff --git a/kie-ci/src/main/java/org/drools/scanner/DependencyDescriptor.java b/kie-ci/src/main/java/org/drools/scanner/DependencyDescriptor.java
index bef63bce73d..bb664538a27 100644
--- a/kie-ci/src/main/java/org/drools/scanner/DependencyDescriptor.java
+++ b/kie-ci/src/main/java/org/drools/scanner/DependencyDescriptor.java
@@ -67,7 +67,7 @@ public String getType() {
}
public boolean isFixedVersion() {
- return !isSnapshot() && !version.equals("LATEST") && !version.equals(")");
+ return !isSnapshot() && !version.equals("LATEST") && !version.equals("RELEASE");
}
public boolean isSnapshot() {
diff --git a/kie-ci/src/main/java/org/drools/scanner/KieRepositoryScannerImpl.java b/kie-ci/src/main/java/org/drools/scanner/KieRepositoryScannerImpl.java
index 9cad4a12bbe..c57c373347e 100644
--- a/kie-ci/src/main/java/org/drools/scanner/KieRepositoryScannerImpl.java
+++ b/kie-ci/src/main/java/org/drools/scanner/KieRepositoryScannerImpl.java
@@ -166,6 +166,9 @@ private Collection<Artifact> scanForUpdates(Collection<DependencyDescriptor> dep
List<Artifact> newArtifacts = new ArrayList<Artifact>();
for (DependencyDescriptor dependency : dependencies) {
Artifact newArtifact = getArtifactResolver().resolveArtifact(dependency.toResolvableString());
+ if (newArtifact == null) {
+ continue;
+ }
DependencyDescriptor resolvedDep = new DependencyDescriptor(newArtifact);
if (resolvedDep.isNewerThan(dependency)) {
newArtifacts.add(newArtifact);
diff --git a/kie-ci/src/test/java/org/drools/scanner/KieRepositoryScannerTest.java b/kie-ci/src/test/java/org/drools/scanner/KieRepositoryScannerTest.java
index 95b75930554..77b33dee482 100644
--- a/kie-ci/src/test/java/org/drools/scanner/KieRepositoryScannerTest.java
+++ b/kie-ci/src/test/java/org/drools/scanner/KieRepositoryScannerTest.java
@@ -141,7 +141,7 @@ public void testScannerOnPomProject() throws Exception {
InternalKieModule kJar1 = createKieJarWithClass(ks, kf, gav1, 2, 7);
repository.deployArtifact(gav1, kJar1, createKPom(gav1));
- KieContainer kieContainer = ks.getKieContainer(kf.newGav("org.kie", "scanner-master-test", "1.0"));
+ KieContainer kieContainer = ks.getKieContainer(kf.newGav("org.kie", "scanner-master-test", "LATEST"));
KieSession ksession = kieContainer.getKieSession("KSession1");
checkKSession(ksession, 14);
|
d9dbcbd78c093f4d7d326babab7d64262c2e0280
|
drools
|
JBRULES-340 core implementation for 'from' -The- from node is now added -ReteooBuilder is added, but not unit tested.--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@5713 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java b/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
index b10c07473da..f6cfd376433 100644
--- a/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
+++ b/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
@@ -95,7 +95,7 @@ public abstract class AbstractWorkingMemory
protected final AgendaEventSupport agendaEventSupport = new AgendaEventSupport( this );
/** The <code>RuleBase</code> with which this memory is associated. */
- protected transient InternalRuleBase ruleBase;
+ protected transient InternalRuleBase ruleBase;
protected final FactHandleFactory handleFactory;
@@ -156,40 +156,75 @@ public AbstractWorkingMemory(final int id,
void setRuleBase(InternalRuleBase ruleBase) {
this.ruleBase = ruleBase;
}
-
+
public void addEventListener(final WorkingMemoryEventListener listener) {
- this.workingMemoryEventSupport.addEventListener( listener );
+ try {
+ lock.lock();
+ this.workingMemoryEventSupport.addEventListener( listener );
+ } finally {
+ lock.unlock();
+ }
}
public void removeEventListener(final WorkingMemoryEventListener listener) {
- this.workingMemoryEventSupport.removeEventListener( listener );
+ try {
+ lock.lock();
+ this.workingMemoryEventSupport.removeEventListener( listener );
+ } finally {
+ lock.unlock();
+ }
}
public List getWorkingMemoryEventListeners() {
- return this.workingMemoryEventSupport.getEventListeners();
+ try {
+ lock.lock();
+ return this.workingMemoryEventSupport.getEventListeners();
+ } finally {
+ lock.unlock();
+ }
}
public void addEventListener(final AgendaEventListener listener) {
- this.agendaEventSupport.addEventListener( listener );
+ try {
+ lock.lock();
+ this.agendaEventSupport.addEventListener( listener );
+ } finally {
+ lock.unlock();
+ }
}
public void removeEventListener(final AgendaEventListener listener) {
- this.agendaEventSupport.removeEventListener( listener );
+ try {
+ lock.lock();
+ this.agendaEventSupport.removeEventListener( listener );
+ } finally {
+ lock.unlock();
+ }
}
- public FactHandleFactory getFactHandleFactory() {
- return this.handleFactory;
+ public List getAgendaEventListeners() {
+ try {
+ lock.lock();
+ return this.agendaEventSupport.getEventListeners();
+ } finally {
+ lock.unlock();
+ }
}
- public List getAgendaEventListeners() {
- return this.agendaEventSupport.getEventListeners();
+ public FactHandleFactory getFactHandleFactory() {
+ return this.handleFactory;
}
/**
* @see WorkingMemory
*/
public Map getGlobals() {
- return this.globals;
+ try {
+ lock.lock();
+ return this.globals;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -197,20 +232,25 @@ public Map getGlobals() {
*/
public void setGlobal(final String name,
final Object value) {
- // Make sure the global has been declared in the RuleBase
- final Map globalDefintions = this.ruleBase.getGlobals();
- final Class type = (Class) globalDefintions.get( name );
- if ( (type == null) ) {
- throw new RuntimeException( "Unexpected global [" + name + "]" );
- } else if ( !type.isInstance( value ) ) {
- throw new RuntimeException( "Illegal class for global. " + "Expected [" + type.getName() + "], " + "found [" + value.getClass().getName() + "]." );
+ try {
+ lock.lock();
+ // Make sure the global has been declared in the RuleBase
+ final Map globalDefintions = this.ruleBase.getGlobals();
+ final Class type = (Class) globalDefintions.get( name );
+ if ( (type == null) ) {
+ throw new RuntimeException( "Unexpected global [" + name + "]" );
+ } else if ( !type.isInstance( value ) ) {
+ throw new RuntimeException( "Illegal class for global. " + "Expected [" + type.getName() + "], " + "found [" + value.getClass().getName() + "]." );
- } else {
- this.globals.put( name,
- value );
+ } else {
+ this.globals.put( name,
+ value );
+ }
+ } finally {
+ lock.unlock();
}
}
-
+
public long getId() {
return this.id;
}
@@ -219,8 +259,13 @@ public long getId() {
* @see WorkingMemory
*/
public Object getGlobal(final String name) {
- final Object object = this.globals.get( name );
- return object;
+ try {
+ lock.lock();
+ final Object object = this.globals.get( name );
+ return object;
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -293,14 +338,19 @@ public synchronized void fireAllRules(final AgendaFilter agendaFilter) throws Fa
*
*/
public Object getObject(final FactHandle handle) {
- // you must always take the value from the assertMap, incase the handle
- // is not from this WorkingMemory
- InternalFactHandle factHandle = (InternalFactHandle) this.assertMap.get( handle );
- if ( factHandle != null ) {
- return factHandle.getObject();
- }
+ try {
+ lock.lock();
+ // you must always take the value from the assertMap, incase the handle
+ // is not from this WorkingMemory
+ InternalFactHandle factHandle = (InternalFactHandle) this.assertMap.get( handle );
+ if ( factHandle != null ) {
+ return factHandle.getObject();
+ }
- return null;
+ return null;
+ } finally {
+ lock.unlock();
+ }
}
@@ -308,13 +358,23 @@ public Object getObject(final FactHandle handle) {
* @see WorkingMemory
*/
public FactHandle getFactHandle(final Object object) {
- final FactHandle factHandle = (FactHandle) this.assertMap.get( object );
+ try {
+ lock.lock();
+ final FactHandle factHandle = (FactHandle) this.assertMap.get( object );
- return factHandle;
+ return factHandle;
+ } finally {
+ lock.unlock();
+ }
}
public List getFactHandles() {
- return new ArrayList( this.assertMap.values() );
+ try {
+ lock.lock();
+ return new ArrayList( this.assertMap.values() );
+ } finally {
+ lock.unlock();
+ }
}
/**
@@ -422,8 +482,8 @@ public FactHandle assertObject(final Object object,
return null;
}
InternalFactHandle handle = null;
- this.lock.lock();
try {
+ this.lock.lock();
// check if the object already exists in the WM
handle = (InternalFactHandle) this.assertMap.get( object );
@@ -656,8 +716,8 @@ public void retractObject(final FactHandle factHandle,
final boolean updateEqualsMap,
final Rule rule,
final Activation activation) throws FactException {
- this.lock.lock();
try {
+ this.lock.lock();
final InternalFactHandle handle = (InternalFactHandle) factHandle;
if ( handle.getId() == -1 ) {
// can't retract an already retracted handle
diff --git a/drools-core/src/main/java/org/drools/common/BetaNodeBinder.java b/drools-core/src/main/java/org/drools/common/BetaNodeBinder.java
index 5b7ea409544..326442b2592 100644
--- a/drools-core/src/main/java/org/drools/common/BetaNodeBinder.java
+++ b/drools-core/src/main/java/org/drools/common/BetaNodeBinder.java
@@ -60,7 +60,7 @@ public boolean isAllowed(final InternalFactHandle handle,
}
for ( int i = 0; i < this.constraints.length; i++ ) {
- if ( !this.constraints[i].isAllowed( handle,
+ if ( !this.constraints[i].isAllowed( handle.getObject(),
tuple,
workingMemory ) ) {
return false;
diff --git a/drools-core/src/main/java/org/drools/common/InstanceEqualsConstraint.java b/drools-core/src/main/java/org/drools/common/InstanceEqualsConstraint.java
index 52cacdf05e9..61008d66b87 100644
--- a/drools-core/src/main/java/org/drools/common/InstanceEqualsConstraint.java
+++ b/drools-core/src/main/java/org/drools/common/InstanceEqualsConstraint.java
@@ -48,10 +48,10 @@ public Declaration[] getRequiredDeclarations() {
return this.declarations;
}
- public boolean isAllowed(final InternalFactHandle handle,
+ public boolean isAllowed(final Object object,
final Tuple tuple,
final WorkingMemory workingMemory) {
- return (tuple.get( this.otherColumn ).getObject() == handle.getObject());
+ return (tuple.get( this.otherColumn ).getObject() == object);
}
public String toString() {
diff --git a/drools-core/src/main/java/org/drools/common/InstanceNotEqualsConstraint.java b/drools-core/src/main/java/org/drools/common/InstanceNotEqualsConstraint.java
index 89cb4eb38b5..2be1baefb7c 100644
--- a/drools-core/src/main/java/org/drools/common/InstanceNotEqualsConstraint.java
+++ b/drools-core/src/main/java/org/drools/common/InstanceNotEqualsConstraint.java
@@ -41,10 +41,10 @@ public Declaration[] getRequiredDeclarations() {
return this.declarations;
}
- public boolean isAllowed(final InternalFactHandle handle,
+ public boolean isAllowed(final Object object,
final Tuple tuple,
final WorkingMemory workingMemory) {
- return !(tuple.get( this.otherColumn ).getObject() == handle.getObject());
+ return !(tuple.get( this.otherColumn ).getObject() == object);
}
public String toString() {
diff --git a/drools-core/src/main/java/org/drools/leaps/AlphaMemory.java b/drools-core/src/main/java/org/drools/leaps/AlphaMemory.java
index 3d2ee8ec637..ec11da7389a 100644
--- a/drools-core/src/main/java/org/drools/leaps/AlphaMemory.java
+++ b/drools-core/src/main/java/org/drools/leaps/AlphaMemory.java
@@ -19,7 +19,7 @@ boolean checkAlpha( final FieldConstraint alpha,
final WorkingMemory workingMemory ) {
Boolean ret = (Boolean) this.alphaChecks.get( factHandle );
if (ret == null) {
- ret = new Boolean( alpha.isAllowed( factHandle, tuple, workingMemory ) );
+ ret = new Boolean( alpha.isAllowed( factHandle.getObject(), tuple, workingMemory ) );
this.alphaChecks.put( factHandle, ret );
}
diff --git a/drools-core/src/main/java/org/drools/leaps/ColumnConstraints.java b/drools-core/src/main/java/org/drools/leaps/ColumnConstraints.java
index 1b3f6f03be6..99de09d18ab 100644
--- a/drools-core/src/main/java/org/drools/leaps/ColumnConstraints.java
+++ b/drools-core/src/main/java/org/drools/leaps/ColumnConstraints.java
@@ -85,7 +85,7 @@ public final boolean isAllowedAlpha(final InternalFactHandle factHandle,
if ( this.alphaPresent ) {
for ( int i = 0, length = this.alphaConstraints.length; i < length; i++ ) {
// escape immediately if some condition does not match
- if ( !this.alphaConstraints[i].isAllowed( factHandle,
+ if ( !this.alphaConstraints[i].isAllowed( factHandle.getObject(),
tuple,
workingMemory ) ) {
return false;
diff --git a/drools-core/src/main/java/org/drools/leaps/LeapsWorkingMemory.java b/drools-core/src/main/java/org/drools/leaps/LeapsWorkingMemory.java
index af7d4513e68..32bee42bb93 100644
--- a/drools-core/src/main/java/org/drools/leaps/LeapsWorkingMemory.java
+++ b/drools-core/src/main/java/org/drools/leaps/LeapsWorkingMemory.java
@@ -71,7 +71,7 @@ class LeapsWorkingMemory extends AbstractWorkingMemory
implements
EventSupport,
PropertyChangeListener {
- private static final long serialVersionUID = -2524904474925421759L;
+ private static final long serialVersionUID = 320;
private final Map queryResults;
@@ -326,8 +326,8 @@ public void modifyObject(final FactHandle factHandle,
final Object object,
final Rule rule,
final Activation activation) throws FactException {
- this.getLock().lock();
try {
+ this.getLock().lock();
final PropagationContext propagationContext = new PropagationContextImpl( this.propagationIdCounter++,
PropagationContext.MODIFICATION,
rule,
diff --git a/drools-core/src/main/java/org/drools/reteoo/AlphaNode.java b/drools-core/src/main/java/org/drools/reteoo/AlphaNode.java
index 709baa87638..b65c2fd9f76 100644
--- a/drools-core/src/main/java/org/drools/reteoo/AlphaNode.java
+++ b/drools-core/src/main/java/org/drools/reteoo/AlphaNode.java
@@ -131,7 +131,7 @@ public void assertObject(final DefaultFactHandle handle,
final PropagationContext context,
final ReteooWorkingMemory workingMemory) throws FactException {
final Set memory = (Set) workingMemory.getNodeMemory( this );
- if ( this.constraint.isAllowed( handle,
+ if ( this.constraint.isAllowed( handle.getObject(),
null,
workingMemory ) ) {
memory.add( handle );
@@ -157,7 +157,7 @@ public void modifyObject(final DefaultFactHandle handle,
final ReteooWorkingMemory workingMemory) {
final Set memory = (Set) workingMemory.getNodeMemory( this );
- if ( this.constraint.isAllowed( handle,
+ if ( this.constraint.isAllowed( handle.getObject(),
null,
workingMemory ) ) {
if ( memory.add( handle ) ) {
diff --git a/drools-core/src/main/java/org/drools/reteoo/FromNode.java b/drools-core/src/main/java/org/drools/reteoo/FromNode.java
new file mode 100644
index 00000000000..d0b263655ae
--- /dev/null
+++ b/drools-core/src/main/java/org/drools/reteoo/FromNode.java
@@ -0,0 +1,236 @@
+package org.drools.reteoo;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.drools.RuleBaseConfiguration;
+import org.drools.common.BetaNodeBinder;
+import org.drools.common.DefaultFactHandle;
+import org.drools.common.InternalFactHandle;
+import org.drools.common.NodeMemory;
+import org.drools.common.PropagationContextImpl;
+import org.drools.rule.Declaration;
+import org.drools.rule.EvalCondition;
+import org.drools.rule.From;
+import org.drools.spi.Constraint;
+import org.drools.spi.DataProvider;
+import org.drools.spi.FieldConstraint;
+import org.drools.spi.PropagationContext;
+
+public class FromNode extends TupleSource
+ implements
+ TupleSink,
+ NodeMemory {
+ /**
+ *
+ */
+ private static final long serialVersionUID = 320;
+
+ private DataProvider dataProvider;
+ private TupleSource tupleSource;
+ private FieldConstraint[] constraints;
+ private BetaNodeBinder binder;
+
+ public FromNode(final int id,
+ final DataProvider dataProvider,
+ final TupleSource tupleSource,
+ final FieldConstraint[] constraints,
+ final BetaNodeBinder binder) {
+ super( id );
+ this.dataProvider = dataProvider;
+ this.tupleSource = tupleSource;
+ this.constraints = constraints;
+ if ( binder == null ) {
+ this.binder = new BetaNodeBinder();
+ } else {
+ this.binder = binder;
+ }
+ }
+
+ /**
+ * This method isn't as efficient as it could be, as its using the standard join node mechanisms - so everything is bidirectionally
+ * linked. As FactHandle's are never retracted, this relationship does not need to be maintined - but as this optimisation would
+ * need refactoring, I've used the standard join node mechanism for now.
+ *
+ */
+ public void assertTuple(ReteTuple leftTuple,
+ PropagationContext context,
+ ReteooWorkingMemory workingMemory) {
+ final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
+
+ memory.add( workingMemory,
+ leftTuple );
+
+ for ( Iterator it = this.dataProvider.getResults( leftTuple ); it.hasNext(); ) {
+ Object object = it.next();
+
+ // First alpha node filters
+ boolean isAllowed = true;
+ for ( int i = 0, length = this.constraints.length; i < length; i++ ) {
+ if ( !this.constraints[i].isAllowed( object, leftTuple, workingMemory ) ) {
+ isAllowed = false;
+ break;
+ }
+ }
+
+ if ( !isAllowed ) {
+ continue;
+ }
+
+ final InternalFactHandle handle = workingMemory.getFactHandleFactory().newFactHandle( object );
+ final ObjectMatches objectMatches = new ObjectMatches( (DefaultFactHandle) handle );
+
+ if ( binder.isAllowed( handle,
+ leftTuple,
+ workingMemory ) ) {
+ final TupleMatch tupleMatch = new TupleMatch( leftTuple,
+ objectMatches );
+
+ leftTuple.addTupleMatch( (DefaultFactHandle) handle,
+ tupleMatch );
+
+ propagateAssertTuple( new ReteTuple( leftTuple,
+ (DefaultFactHandle) handle ),
+ tupleMatch,
+ context,
+ workingMemory );
+ }
+ }
+ }
+
+ /**
+ * This could be made more intelligent by finding out if the modified Fact is depended upon by the requiredDeclarations.
+ * If it isn't then we can continue to just propagate as a normal modify, without having to retrieve and check values
+ * from the DataProvider.
+ */
+ public void modifyTuple(ReteTuple leftTuple,
+ PropagationContext context,
+ ReteooWorkingMemory workingMemory) {
+ final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
+
+ // We remove the tuple as now its modified it needs to go to the top of
+ // the stack, which is added back in else where
+ memory.remove( workingMemory,
+ leftTuple );
+
+ final Map matches = leftTuple.getTupleMatches();
+
+ if ( matches.isEmpty() ) {
+ // No child propagations, so try as a new assert, will ensure the
+ // tuple is added to the top of the memory
+ assertTuple( leftTuple,
+ context,
+ workingMemory );
+ } else {
+ // first purge the network of all future uses of the 'from' facts
+ for ( final Iterator it = matches.values().iterator(); it.hasNext(); ) {
+ final TupleMatch tupleMatch = (TupleMatch) it.next();
+ workingMemory.getFactHandleFactory().destroyFactHandle( tupleMatch.getObjectMatches().getFactHandle() );
+ propagateRetractTuple( tupleMatch,
+ context,
+ workingMemory );
+ }
+
+ // now all existing matches must now be cleared and the DataProvider re-processed.
+ leftTuple.clearTupleMatches();
+
+ assertTuple( leftTuple,
+ context,
+ workingMemory );
+
+ }
+ }
+
+ public void retractTuple(ReteTuple leftTuple,
+ PropagationContext context,
+ ReteooWorkingMemory workingMemory) {
+ final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
+ memory.remove( workingMemory,
+ leftTuple );
+
+ final Map matches = leftTuple.getTupleMatches();
+
+ if ( !matches.isEmpty() ) {
+ for ( final Iterator it = matches.values().iterator(); it.hasNext(); ) {
+ final TupleMatch tupleMatch = (TupleMatch) it.next();
+ workingMemory.getFactHandleFactory().destroyFactHandle( tupleMatch.getObjectMatches().getFactHandle() );
+ propagateRetractTuple( tupleMatch,
+ context,
+ workingMemory );
+ }
+ }
+ }
+
+ public List getPropagatedTuples(ReteooWorkingMemory workingMemory,
+ TupleSink sink) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public void attach() {
+ this.tupleSource.addTupleSink( this );
+ }
+
+ public void attach(ReteooWorkingMemory[] workingMemories) {
+ attach();
+
+ for ( int i = 0, length = workingMemories.length; i < length; i++ ) {
+ final ReteooWorkingMemory workingMemory = workingMemories[i];
+ final PropagationContext propagationContext = new PropagationContextImpl( workingMemory.getNextPropagationIdCounter(),
+ PropagationContext.RULE_ADDITION,
+ null,
+ null );
+ this.tupleSource.updateNewNode( workingMemory,
+ propagationContext );
+ }
+ }
+
+ public void remove(BaseNode node,
+ ReteooWorkingMemory[] workingMemories) {
+ getTupleSinks().remove( node );
+ removeShare();
+
+ if ( this.sharedCount < 0 ) {
+ for ( int i = 0, length = workingMemories.length; i < length; i++ ) {
+ workingMemories[i].clearNodeMemory( this );
+ }
+ this.tupleSource.remove( this,
+ workingMemories );
+ }
+ }
+
+ public void updateNewNode(ReteooWorkingMemory workingMemory,
+ PropagationContext context) {
+ this.attachingNewNode = true;
+
+ final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
+
+ // @todo:as there is no right memory
+
+ // for ( final Iterator it = memory.getRightObjectMemory().iterator(); it.hasNext(); ) {
+ // final ObjectMatches objectMatches = (ObjectMatches) it.next();
+ // final DefaultFactHandle handle = objectMatches.getFactHandle();
+ // for ( TupleMatch tupleMatch = objectMatches.getFirstTupleMatch(); tupleMatch != null; tupleMatch = (TupleMatch) tupleMatch.getNext() ) {
+ // final ReteTuple tuple = new ReteTuple( tupleMatch.getTuple(),
+ // handle );
+ // final TupleSink sink = (TupleSink) this.tupleSinks.get( this.tupleSinks.size() - 1 );
+ // if ( sink != null ) {
+ // tupleMatch.addJoinedTuple( tuple );
+ // sink.assertTuple( tuple,
+ // context,
+ // workingMemory );
+ // } else {
+ // throw new RuntimeException( "Possible BUG: trying to propagate an assert to a node that was the last added node" );
+ // }
+ // }
+ // }
+
+ this.attachingNewNode = false;
+ }
+
+ public Object createMemory(RuleBaseConfiguration config) {
+ return new BetaMemory( config,
+ this.binder );
+ }
+}
diff --git a/drools-core/src/main/java/org/drools/reteoo/ReteTuple.java b/drools-core/src/main/java/org/drools/reteoo/ReteTuple.java
index 7a2ed20cf43..2b04de5b13d 100644
--- a/drools-core/src/main/java/org/drools/reteoo/ReteTuple.java
+++ b/drools-core/src/main/java/org/drools/reteoo/ReteTuple.java
@@ -203,6 +203,10 @@ public void clearLinkedTuple() {
this.linkedTuples.clear();
}
+ public void clearTupleMatches() {
+ this.matches.clear();
+ }
+
public void addTupleMatch(final DefaultFactHandle handle,
final TupleMatch node) {
if ( this.matches == Collections.EMPTY_MAP ) {
diff --git a/drools-core/src/main/java/org/drools/reteoo/ReteooBuilder.java b/drools-core/src/main/java/org/drools/reteoo/ReteooBuilder.java
index 53dd1363397..4cf8a5bb231 100644
--- a/drools-core/src/main/java/org/drools/reteoo/ReteooBuilder.java
+++ b/drools-core/src/main/java/org/drools/reteoo/ReteooBuilder.java
@@ -41,6 +41,7 @@
import org.drools.rule.Declaration;
import org.drools.rule.EvalCondition;
import org.drools.rule.Exists;
+import org.drools.rule.From;
import org.drools.rule.GroupElement;
import org.drools.rule.InvalidPatternException;
import org.drools.rule.LiteralConstraint;
@@ -555,6 +556,63 @@ private TupleSource attachNode(final TupleSource candidate) {
return node;
}
+
+ private void attachFrom(final TupleSource tupleSource,
+ final From from) {
+ Column column = from.getColumn();
+
+ // Adjusting offset in case a previous Initial-Fact was added to the network
+ column.adjustOffset( this.currentOffsetAdjustment );
+
+ final List constraints = column.getConstraints();
+
+ // Check if the Column is bound
+ if ( column.getDeclaration() != null ) {
+ final Declaration declaration = column.getDeclaration();
+ // Add the declaration the map of previously bound declarations
+ this.declarations.put( declaration.getIdentifier(),
+ declaration );
+ }
+
+ final List predicateConstraints = new ArrayList();
+ final List alphaNodeConstraints = new ArrayList();
+
+ for ( final Iterator it = constraints.iterator(); it.hasNext(); ) {
+ final Object object = it.next();
+ // Check if its a declaration
+ if ( object instanceof Declaration ) {
+ final Declaration declaration = (Declaration) object;
+ // Add the declaration the map of previously bound declarations
+ this.declarations.put( declaration.getIdentifier(),
+ declaration );
+ continue;
+ }
+
+ final FieldConstraint fieldConstraint = (FieldConstraint) object;
+ if ( fieldConstraint instanceof LiteralConstraint ) {
+ alphaNodeConstraints.add( fieldConstraint );
+ } else {
+ checkUnboundDeclarations( fieldConstraint.getRequiredDeclarations() );
+ predicateConstraints.add( fieldConstraint );
+ }
+ }
+
+
+ BetaNodeBinder binder;
+
+ if ( !predicateConstraints.isEmpty() ) {
+ binder = new BetaNodeBinder( (FieldConstraint[]) predicateConstraints.toArray( new FieldConstraint[predicateConstraints.size()] ) );
+ } else {
+ binder = new BetaNodeBinder();
+ }
+
+ FromNode node = new FromNode( id,
+ from.getDataProvider(),
+ this.tupleSource,
+ ( FieldConstraint[] ) alphaNodeConstraints.toArray( new FieldConstraint[ alphaNodeConstraints.size() ] ),
+ binder );
+
+ }
private ObjectSource attachNode(final ObjectSource candidate) {
ObjectSource node = (ObjectSource) this.attachedNodes.get( candidate );
diff --git a/drools-core/src/main/java/org/drools/reteoo/ReteooWorkingMemory.java b/drools-core/src/main/java/org/drools/reteoo/ReteooWorkingMemory.java
index 6e58b189b2c..d0fc775bfb5 100644
--- a/drools-core/src/main/java/org/drools/reteoo/ReteooWorkingMemory.java
+++ b/drools-core/src/main/java/org/drools/reteoo/ReteooWorkingMemory.java
@@ -48,7 +48,7 @@ public class ReteooWorkingMemory extends AbstractWorkingMemory {
/**
*
*/
- private static final long serialVersionUID = -5107074490638575715L;
+ private static final long serialVersionUID = 320;
/**
* Construct.
@@ -87,8 +87,8 @@ public void modifyObject(final FactHandle factHandle,
final Object object,
final Rule rule,
final Activation activation) throws FactException {
- this.lock.lock();
try {
+ this.lock.lock();
final int status = ((InternalFactHandle) factHandle).getEqualityKey().getStatus();
final InternalFactHandle handle = (InternalFactHandle) factHandle;
final Object originalObject = handle.getObject();
diff --git a/drools-core/src/main/java/org/drools/reteoo/TerminalNode.java b/drools-core/src/main/java/org/drools/reteoo/TerminalNode.java
index 1b20118858c..2d28ed3cbc9 100644
--- a/drools-core/src/main/java/org/drools/reteoo/TerminalNode.java
+++ b/drools-core/src/main/java/org/drools/reteoo/TerminalNode.java
@@ -52,7 +52,7 @@ final class TerminalNode extends BaseNode
/**
*
*/
- private static final long serialVersionUID = -4172639826881353001L;
+ private static final long serialVersionUID = 320;
/** The rule to invoke upon match. */
private final Rule rule;
private final TupleSource tupleSource;
diff --git a/drools-core/src/main/java/org/drools/rule/AndCompositeRestriction.java b/drools-core/src/main/java/org/drools/rule/AndCompositeRestriction.java
index 0c2fe45e803..1fd2668ef83 100644
--- a/drools-core/src/main/java/org/drools/rule/AndCompositeRestriction.java
+++ b/drools-core/src/main/java/org/drools/rule/AndCompositeRestriction.java
@@ -4,7 +4,6 @@
import java.util.Set;
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.spi.Restriction;
import org.drools.spi.Tuple;
@@ -17,13 +16,11 @@ public AndCompositeRestriction(Restriction[] restriction) {
}
public boolean isAllowed(final Object object,
- final InternalFactHandle handle,
final Tuple tuple,
final WorkingMemory workingMemory) {
for ( int i = 0, ilength = this.restrictions.length; i < ilength; i++ ) {
if ( !restrictions[i].isAllowed( object,
- handle,
tuple,
workingMemory ) ) {
return false;
diff --git a/drools-core/src/main/java/org/drools/rule/Column.java b/drools-core/src/main/java/org/drools/rule/Column.java
index 6358c5140aa..e05f550552b 100644
--- a/drools-core/src/main/java/org/drools/rule/Column.java
+++ b/drools-core/src/main/java/org/drools/rule/Column.java
@@ -33,7 +33,7 @@ public class Column
/**
*
*/
- private static final long serialVersionUID = 9167552040211010022L;
+ private static final long serialVersionUID = 320;
private final ObjectType objectType;
private List constraints = Collections.EMPTY_LIST;
final Declaration declaration;
diff --git a/drools-core/src/main/java/org/drools/rule/From.java b/drools-core/src/main/java/org/drools/rule/From.java
new file mode 100644
index 00000000000..1d34d086dc5
--- /dev/null
+++ b/drools-core/src/main/java/org/drools/rule/From.java
@@ -0,0 +1,25 @@
+package org.drools.rule;
+
+import java.io.Serializable;
+
+import org.drools.spi.DataProvider;
+
+public class From implements Serializable{
+ private Column column;
+
+ private DataProvider dataProvider;
+
+ public From(final Column column,
+ final DataProvider dataProvider) {
+ this.column = column;
+ this.dataProvider = dataProvider;
+ }
+
+ public Column getColumn() {
+ return column;
+ }
+
+ public DataProvider getDataProvider() {
+ return dataProvider;
+ }
+}
diff --git a/drools-core/src/main/java/org/drools/rule/LiteralConstraint.java b/drools-core/src/main/java/org/drools/rule/LiteralConstraint.java
index 8091b853a65..e84e1abea08 100644
--- a/drools-core/src/main/java/org/drools/rule/LiteralConstraint.java
+++ b/drools-core/src/main/java/org/drools/rule/LiteralConstraint.java
@@ -17,7 +17,6 @@
*/
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.spi.Evaluator;
import org.drools.spi.FieldConstraint;
import org.drools.spi.FieldExtractor;
@@ -71,10 +70,10 @@ public Declaration[] getRequiredDeclarations() {
return this.restriction.getRequiredDeclarations();
}
- public boolean isAllowed(final InternalFactHandle handle,
+ public boolean isAllowed(final Object object,
final Tuple tuple,
final WorkingMemory workingMemory) {
- return this.restriction.isAllowed( this.extractor.getValue( handle.getObject() ), handle, tuple, workingMemory );
+ return this.restriction.isAllowed( this.extractor.getValue( object ), tuple, workingMemory );
}
public String toString() {
diff --git a/drools-core/src/main/java/org/drools/rule/LiteralRestriction.java b/drools-core/src/main/java/org/drools/rule/LiteralRestriction.java
index 5e2f85db22b..c422d889941 100644
--- a/drools-core/src/main/java/org/drools/rule/LiteralRestriction.java
+++ b/drools-core/src/main/java/org/drools/rule/LiteralRestriction.java
@@ -17,7 +17,6 @@
*/
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.spi.Evaluator;
import org.drools.spi.FieldConstraint;
import org.drools.spi.FieldExtractor;
@@ -64,7 +63,6 @@ public Declaration[] getRequiredDeclarations() {
}
public boolean isAllowed(final Object object,
- final InternalFactHandle handle,
final Tuple tuple,
final WorkingMemory workingMemory) {
return this.evaluator.evaluate( object,
diff --git a/drools-core/src/main/java/org/drools/rule/MultiRestrictionFieldConstraint.java b/drools-core/src/main/java/org/drools/rule/MultiRestrictionFieldConstraint.java
index 143690164fd..ddca8293c45 100644
--- a/drools-core/src/main/java/org/drools/rule/MultiRestrictionFieldConstraint.java
+++ b/drools-core/src/main/java/org/drools/rule/MultiRestrictionFieldConstraint.java
@@ -4,7 +4,6 @@
import java.util.Set;
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.spi.Evaluator;
import org.drools.spi.Extractor;
import org.drools.spi.FieldConstraint;
@@ -40,11 +39,10 @@ public Declaration[] getRequiredDeclarations() {
return this.restrictions.getRequiredDeclarations();
}
- public boolean isAllowed(final InternalFactHandle handle,
+ public boolean isAllowed(final Object object,
final Tuple tuple,
final WorkingMemory workingMemory) {
- return this.restrictions.isAllowed( this.extractor.getValue( handle.getObject() ),
- handle,
+ return this.restrictions.isAllowed( this.extractor.getValue( object ),
tuple,
workingMemory );
}
diff --git a/drools-core/src/main/java/org/drools/rule/OrCompositeRestriction.java b/drools-core/src/main/java/org/drools/rule/OrCompositeRestriction.java
index 56fb080a103..6f8e19f36bb 100644
--- a/drools-core/src/main/java/org/drools/rule/OrCompositeRestriction.java
+++ b/drools-core/src/main/java/org/drools/rule/OrCompositeRestriction.java
@@ -5,7 +5,6 @@
import java.util.Set;
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.spi.Restriction;
import org.drools.spi.Tuple;
@@ -18,13 +17,11 @@ public OrCompositeRestriction(Restriction[] restriction) {
}
public boolean isAllowed(final Object object,
- final InternalFactHandle handle,
final Tuple tuple,
final WorkingMemory workingMemory) {
for ( int i = 0, ilength = this.restrictions.length; i < ilength; i++ ) {
if ( restrictions[i].isAllowed( object,
- handle,
tuple,
workingMemory ) ) {
return true;
diff --git a/drools-core/src/main/java/org/drools/rule/PredicateConstraint.java b/drools-core/src/main/java/org/drools/rule/PredicateConstraint.java
index ea108f33a21..a0e5e40bf93 100644
--- a/drools-core/src/main/java/org/drools/rule/PredicateConstraint.java
+++ b/drools-core/src/main/java/org/drools/rule/PredicateConstraint.java
@@ -18,7 +18,6 @@
import org.drools.RuntimeDroolsException;
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.spi.FieldConstraint;
import org.drools.spi.PredicateExpression;
import org.drools.spi.Tuple;
@@ -85,12 +84,12 @@ public String toString() {
return "[PredicateConstraint declarations=" + this.requiredDeclarations + "]";
}
- public boolean isAllowed(final InternalFactHandle handle,
+ public boolean isAllowed(final Object object,
final Tuple tuple,
final WorkingMemory workingMemory) {
try {
- return this.expression.evaluate( tuple,
- handle,
+ return this.expression.evaluate( object,
+ tuple,
this.declaration,
this.requiredDeclarations,
workingMemory );
diff --git a/drools-core/src/main/java/org/drools/rule/ReturnValueConstraint.java b/drools-core/src/main/java/org/drools/rule/ReturnValueConstraint.java
index 66ab234668a..26fe31fc289 100644
--- a/drools-core/src/main/java/org/drools/rule/ReturnValueConstraint.java
+++ b/drools-core/src/main/java/org/drools/rule/ReturnValueConstraint.java
@@ -18,7 +18,6 @@
import org.drools.RuntimeDroolsException;
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.spi.Evaluator;
import org.drools.spi.FieldConstraint;
import org.drools.spi.FieldExtractor;
@@ -77,11 +76,10 @@ public Evaluator getEvaluator() {
return this.restriction.getEvaluator();
}
- public boolean isAllowed(final InternalFactHandle handle,
+ public boolean isAllowed(final Object object,
final Tuple tuple,
final WorkingMemory workingMemory) {
- return this.restriction.isAllowed( this.fieldExtractor.getValue( handle.getObject() ),
- handle,
+ return this.restriction.isAllowed( this.fieldExtractor.getValue( object ),
tuple,
workingMemory );
}
diff --git a/drools-core/src/main/java/org/drools/rule/ReturnValueRestriction.java b/drools-core/src/main/java/org/drools/rule/ReturnValueRestriction.java
index bf37691e79c..53f23a2e946 100644
--- a/drools-core/src/main/java/org/drools/rule/ReturnValueRestriction.java
+++ b/drools-core/src/main/java/org/drools/rule/ReturnValueRestriction.java
@@ -20,7 +20,6 @@
import org.drools.RuntimeDroolsException;
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.spi.Evaluator;
import org.drools.spi.FieldConstraint;
import org.drools.spi.FieldExtractor;
@@ -93,7 +92,6 @@ public Evaluator getEvaluator() {
}
public boolean isAllowed(final Object object,
- final InternalFactHandle handle,
final Tuple tuple,
final WorkingMemory workingMemory) {
try {
diff --git a/drools-core/src/main/java/org/drools/rule/VariableConstraint.java b/drools-core/src/main/java/org/drools/rule/VariableConstraint.java
index d8acc9e768e..6b138d7b516 100644
--- a/drools-core/src/main/java/org/drools/rule/VariableConstraint.java
+++ b/drools-core/src/main/java/org/drools/rule/VariableConstraint.java
@@ -17,7 +17,6 @@
*/
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.spi.Evaluator;
import org.drools.spi.FieldConstraint;
import org.drools.spi.FieldExtractor;
@@ -61,11 +60,10 @@ public Evaluator getEvaluator() {
return this.restriction.getEvaluator();
}
- public boolean isAllowed(final InternalFactHandle handle,
+ public boolean isAllowed(final Object object,
final Tuple tuple,
final WorkingMemory workingMemory) {
- return this.restriction.isAllowed( this.fieldExtractor.getValue( handle.getObject() ),
- handle,
+ return this.restriction.isAllowed( this.fieldExtractor.getValue( object ),
tuple,
workingMemory );
}
diff --git a/drools-core/src/main/java/org/drools/rule/VariableRestriction.java b/drools-core/src/main/java/org/drools/rule/VariableRestriction.java
index 574e18bae04..483c7e36b6f 100644
--- a/drools-core/src/main/java/org/drools/rule/VariableRestriction.java
+++ b/drools-core/src/main/java/org/drools/rule/VariableRestriction.java
@@ -19,7 +19,6 @@
import java.util.Arrays;
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.spi.Evaluator;
import org.drools.spi.FieldConstraint;
import org.drools.spi.FieldExtractor;
@@ -57,7 +56,6 @@ public Evaluator getEvaluator() {
}
public boolean isAllowed(final Object object,
- final InternalFactHandle handle,
final Tuple tuple,
final WorkingMemory workingMemory) {
return this.evaluator.evaluate( object,
diff --git a/drools-core/src/main/java/org/drools/spi/DataProvider.java b/drools-core/src/main/java/org/drools/spi/DataProvider.java
new file mode 100644
index 00000000000..d8ec24c4eb1
--- /dev/null
+++ b/drools-core/src/main/java/org/drools/spi/DataProvider.java
@@ -0,0 +1,13 @@
+package org.drools.spi;
+
+import java.util.Iterator;
+import java.util.List;
+
+import org.drools.rule.Declaration;
+
+public interface DataProvider {
+
+ public Declaration[] getRequiredDeclarations();
+
+ public Iterator getResults(Tuple tuple);
+}
diff --git a/drools-core/src/main/java/org/drools/spi/FieldConstraint.java b/drools-core/src/main/java/org/drools/spi/FieldConstraint.java
index db3b3bb67e2..b6382ce41a8 100644
--- a/drools-core/src/main/java/org/drools/spi/FieldConstraint.java
+++ b/drools-core/src/main/java/org/drools/spi/FieldConstraint.java
@@ -17,13 +17,12 @@
*/
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.rule.Declaration;
public interface FieldConstraint
extends
Constraint {
- public boolean isAllowed(InternalFactHandle handle,
+ public boolean isAllowed(Object object,
Tuple tuple,
WorkingMemory workingMemory);
diff --git a/drools-core/src/main/java/org/drools/spi/PredicateExpression.java b/drools-core/src/main/java/org/drools/spi/PredicateExpression.java
index 32b43c48525..e1b420a2bea 100644
--- a/drools-core/src/main/java/org/drools/spi/PredicateExpression.java
+++ b/drools-core/src/main/java/org/drools/spi/PredicateExpression.java
@@ -23,8 +23,8 @@
public interface PredicateExpression
extends
Invoker {
- public boolean evaluate(Tuple tuple,
- FactHandle factHandle,
+ public boolean evaluate(Object object,
+ Tuple tuple,
Declaration declaration,
Declaration[] requiredDeclarations,
WorkingMemory workingMemory) throws Exception;
diff --git a/drools-core/src/main/java/org/drools/spi/Restriction.java b/drools-core/src/main/java/org/drools/spi/Restriction.java
index 5a8db16e415..e90b1681c7e 100644
--- a/drools-core/src/main/java/org/drools/spi/Restriction.java
+++ b/drools-core/src/main/java/org/drools/spi/Restriction.java
@@ -3,14 +3,12 @@
import java.io.Serializable;
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.rule.Declaration;
public interface Restriction extends Serializable {
Declaration[] getRequiredDeclarations();
public boolean isAllowed(Object object,
- InternalFactHandle handle,
Tuple tuple,
WorkingMemory workingMemory);
}
diff --git a/drools-core/src/test/java/org/drools/reteoo/FromNodeTest.java b/drools-core/src/test/java/org/drools/reteoo/FromNodeTest.java
new file mode 100644
index 00000000000..f97eafda904
--- /dev/null
+++ b/drools-core/src/test/java/org/drools/reteoo/FromNodeTest.java
@@ -0,0 +1,423 @@
+package org.drools.reteoo;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+
+import org.drools.Cheese;
+import org.drools.FactHandle;
+import org.drools.RuleBaseFactory;
+import org.drools.base.ClassFieldExtractor;
+import org.drools.base.ClassObjectType;
+import org.drools.base.ValueType;
+import org.drools.base.evaluators.Operator;
+import org.drools.common.DefaultFactHandle;
+import org.drools.common.InternalFactHandle;
+import org.drools.common.PropagationContextImpl;
+import org.drools.rule.Column;
+import org.drools.rule.Declaration;
+import org.drools.rule.From;
+import org.drools.rule.LiteralConstraint;
+import org.drools.rule.VariableConstraint;
+import org.drools.spi.DataProvider;
+import org.drools.spi.Evaluator;
+import org.drools.spi.FieldConstraint;
+import org.drools.spi.FieldValue;
+import org.drools.spi.MockField;
+import org.drools.spi.PropagationContext;
+import org.drools.spi.Tuple;
+
+import junit.framework.TestCase;
+
+public class FromNodeTest extends TestCase {
+
+ public void testAlphaNode() {
+ final PropagationContext context = new PropagationContextImpl( 0,
+ PropagationContext.ASSERTION,
+ null,
+ null );
+ final ReteooWorkingMemory workingMemory = new ReteooWorkingMemory( 1,
+ (ReteooRuleBase) RuleBaseFactory.newRuleBase() );
+ final ClassFieldExtractor extractor = new ClassFieldExtractor( Cheese.class,
+ "type" );
+
+ final FieldValue field = new MockField( "stilton" );
+ final LiteralConstraint constraint = new LiteralConstraint( extractor,
+ ValueType.STRING_TYPE.getEvaluator( Operator.EQUAL ),
+ field );
+
+ List list = new ArrayList();
+ Cheese cheese1 = new Cheese( "cheddar",
+ 20 );
+ Cheese cheese2 = new Cheese( "brie",
+ 20 );
+ list.add( cheese1 );
+ list.add( cheese2 );
+ MockDataProvider dataProvider = new MockDataProvider( list );
+
+ FromNode from = new FromNode( 3,
+ dataProvider,
+ null,
+ new FieldConstraint[]{constraint},
+ null );
+ MockTupleSink sink = new MockTupleSink( 5 );
+ from.addTupleSink( sink );
+
+ Person person1 = new Person( "xxx1",
+ 30 );
+ FactHandle person1Handle = workingMemory.assertObject( person1 );
+ ReteTuple tuple1 = new ReteTuple( (DefaultFactHandle) person1Handle );
+ from.assertTuple( tuple1,
+ context,
+ workingMemory );
+
+ // nothing should be asserted, as cheese1 is cheddar and we are filtering on stilton
+ assertEquals( 0,
+ sink.getAsserted().size() );
+
+ //Set cheese1 to stilton and it should now propagate
+ cheese1.setType( "stilton" );
+ Person person2 = new Person( "xxx2",
+ 30 );
+ FactHandle person2Handle = workingMemory.assertObject( person2 );
+ ReteTuple tuple2 = new ReteTuple( (DefaultFactHandle) person2Handle );
+ from.assertTuple( tuple2,
+ context,
+ workingMemory );
+
+ List asserted = sink.getAsserted();
+ assertEquals( 1,
+ asserted.size() );
+ ReteTuple tuple = (ReteTuple) ((Object[]) asserted.get( 0 ))[0];
+ assertSame( person2,
+ tuple.getFactHandles()[0].getObject() );
+ assertSame( cheese1,
+ tuple.getFactHandles()[1].getObject() );
+
+ cheese2.setType( "stilton" );
+ Person person3 = new Person( "xxx2",
+ 30 );
+ FactHandle person3Handle = workingMemory.assertObject( person3 );
+ ReteTuple tuple3 = new ReteTuple( (DefaultFactHandle) person3Handle );
+ from.assertTuple( tuple3,
+ context,
+ workingMemory );
+
+ assertEquals( 3,
+ asserted.size() );
+ tuple = (ReteTuple) ((Object[]) asserted.get( 1 ))[0];
+ assertSame( person3,
+ tuple.getFactHandles()[0].getObject() );
+ assertSame( cheese1,
+ tuple.getFactHandles()[1].getObject() );
+ tuple = (ReteTuple) ((Object[]) asserted.get( 2 ))[0];
+ assertSame( person3,
+ tuple.getFactHandles()[0].getObject() );
+ assertSame( cheese2,
+ tuple.getFactHandles()[1].getObject() );
+
+ assertNotSame( cheese1,
+ cheese2 );
+ }
+
+ public void testBetaNode() {
+ final PropagationContext context = new PropagationContextImpl( 0,
+ PropagationContext.ASSERTION,
+ null,
+ null );
+
+ final ReteooWorkingMemory workingMemory = new ReteooWorkingMemory( 1,
+ (ReteooRuleBase) RuleBaseFactory.newRuleBase() );
+
+ final ClassFieldExtractor priceExtractor = new ClassFieldExtractor( Cheese.class,
+ "price" );
+
+ final ClassFieldExtractor ageExtractor = new ClassFieldExtractor( Person.class,
+ "age" );
+
+ Declaration declaration = new Declaration( "age",
+ ageExtractor,
+ 0 );
+
+ VariableConstraint variableConstraint = new VariableConstraint( priceExtractor,
+ declaration,
+ ValueType.INTEGER_TYPE.getEvaluator( Operator.EQUAL ) );
+
+ List list = new ArrayList();
+ Cheese cheese1 = new Cheese( "cheddar",
+ 18 );
+ Cheese cheese2 = new Cheese( "brie",
+ 12 );
+ list.add( cheese1 );
+ list.add( cheese2 );
+ MockDataProvider dataProvider = new MockDataProvider( list );
+
+ FromNode from = new FromNode( 3,
+ dataProvider,
+ null,
+ new FieldConstraint[]{variableConstraint},
+ null );
+ MockTupleSink sink = new MockTupleSink( 5 );
+ from.addTupleSink( sink );
+
+ Person person1 = new Person( "xxx1",
+ 30 );
+ FactHandle person1Handle = workingMemory.assertObject( person1 );
+ ReteTuple tuple1 = new ReteTuple( (DefaultFactHandle) person1Handle );
+ from.assertTuple( tuple1,
+ context,
+ workingMemory );
+
+ // nothing should be asserted, as cheese1 is cheddar and we are filtering on stilton
+ assertEquals( 0,
+ sink.getAsserted().size() );
+
+ //Set cheese1 to stilton and it should now propagate
+ cheese1.setPrice( 30 );
+ Person person2 = new Person( "xxx2",
+ 30 );
+ FactHandle person2Handle = workingMemory.assertObject( person2 );
+ ReteTuple tuple2 = new ReteTuple( (DefaultFactHandle) person2Handle );
+ from.assertTuple( tuple2,
+ context,
+ workingMemory );
+
+ List asserted = sink.getAsserted();
+ assertEquals( 1,
+ asserted.size() );
+ ReteTuple tuple = (ReteTuple) ((Object[]) asserted.get( 0 ))[0];
+ assertSame( person2,
+ tuple.getFactHandles()[0].getObject() );
+ assertSame( cheese1,
+ tuple.getFactHandles()[1].getObject() );
+
+ cheese2.setPrice( 30 );
+ Person person3 = new Person( "xxx2",
+ 30 );
+ FactHandle person3Handle = workingMemory.assertObject( person3 );
+ ReteTuple tuple3 = new ReteTuple( (DefaultFactHandle) person3Handle );
+ from.assertTuple( tuple3,
+ context,
+ workingMemory );
+
+ assertEquals( 3,
+ asserted.size() );
+ tuple = (ReteTuple) ((Object[]) asserted.get( 1 ))[0];
+ assertSame( person3,
+ tuple.getFactHandles()[0].getObject() );
+ assertSame( cheese1,
+ tuple.getFactHandles()[1].getObject() );
+ tuple = (ReteTuple) ((Object[]) asserted.get( 2 ))[0];
+ assertSame( person3,
+ tuple.getFactHandles()[0].getObject() );
+ assertSame( cheese2,
+ tuple.getFactHandles()[1].getObject() );
+
+ assertNotSame( cheese1,
+ cheese2 );
+ }
+
+ public void testRestract() {
+ final PropagationContext context = new PropagationContextImpl( 0,
+ PropagationContext.ASSERTION,
+ null,
+ null );
+ final ReteooWorkingMemory workingMemory = new ReteooWorkingMemory( 1,
+ (ReteooRuleBase) RuleBaseFactory.newRuleBase() );
+ final ClassFieldExtractor extractor = new ClassFieldExtractor( Cheese.class,
+ "type" );
+
+ final FieldValue field = new MockField( "stilton" );
+ final LiteralConstraint constraint = new LiteralConstraint( extractor,
+ ValueType.STRING_TYPE.getEvaluator( Operator.EQUAL ),
+ field );
+
+ List list = new ArrayList();
+ Cheese cheese1 = new Cheese( "stilton",
+ 5 );
+ Cheese cheese2 = new Cheese( "stilton",
+ 15 );
+ list.add( cheese1 );
+ list.add( cheese2 );
+ MockDataProvider dataProvider = new MockDataProvider( list );
+
+ FromNode from = new FromNode( 3,
+ dataProvider,
+ null,
+ new FieldConstraint[]{constraint},
+ null );
+ MockTupleSink sink = new MockTupleSink( 5 );
+ from.addTupleSink( sink );
+
+ List asserted = sink.getAsserted();
+
+ Person person1 = new Person( "xxx2",
+ 30 );
+ FactHandle person1Handle = workingMemory.assertObject( person1 );
+ ReteTuple tuple = new ReteTuple( (DefaultFactHandle) person1Handle );
+ from.assertTuple( tuple,
+ context,
+ workingMemory );
+
+ assertEquals( 2,
+ asserted.size() );
+
+ BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( from );
+ assertEquals( 1,
+ memory.getLeftTupleMemory().size() );
+ assertEquals( 0,
+ memory.getRightObjectMemory().size() );
+ assertEquals( 2,
+ tuple.getTupleMatches().size() );
+
+ list = new ArrayList();
+ for ( Iterator it = tuple.getTupleMatches().values().iterator(); it.hasNext(); ) {
+ TupleMatch tupleMatch = (TupleMatch) it.next();
+ list.add( tupleMatch.getObjectMatches().getFactHandle().getObject() );
+ }
+ assertEquals( 2,
+ list.size() );
+ assertTrue( list.contains( cheese1 ) );
+ assertTrue( list.contains( cheese2 ) );
+
+ from.retractTuple( tuple,
+ context,
+ workingMemory );
+ assertEquals( 0,
+ memory.getLeftTupleMemory().size() );
+ assertEquals( 0,
+ memory.getRightObjectMemory().size() );
+ }
+
+ public void testModify() {
+ final PropagationContext context = new PropagationContextImpl( 0,
+ PropagationContext.ASSERTION,
+ null,
+ null );
+ final ReteooWorkingMemory workingMemory = new ReteooWorkingMemory( 1,
+ (ReteooRuleBase) RuleBaseFactory.newRuleBase() );
+ final ClassFieldExtractor extractor = new ClassFieldExtractor( Cheese.class,
+ "type" );
+
+ final FieldValue field = new MockField( "stilton" );
+ final LiteralConstraint constraint = new LiteralConstraint( extractor,
+ ValueType.STRING_TYPE.getEvaluator( Operator.EQUAL ),
+ field );
+
+ List list = new ArrayList();
+ Cheese cheese1 = new Cheese( "cheddar",
+ 20 );
+ Cheese cheese2 = new Cheese( "brie",
+ 20 );
+ list.add( cheese1 );
+ list.add( cheese2 );
+ MockDataProvider dataProvider = new MockDataProvider( list );
+
+ FromNode from = new FromNode( 3,
+ dataProvider,
+ null,
+ new FieldConstraint[]{constraint},
+ null );
+ MockTupleSink sink = new MockTupleSink( 5 );
+ from.addTupleSink( sink );
+
+ Person person1 = new Person( "xxx1",
+ 30 );
+ FactHandle person1Handle = workingMemory.assertObject( person1 );
+ ReteTuple tuple1 = new ReteTuple( (DefaultFactHandle) person1Handle );
+ from.assertTuple( tuple1,
+ context,
+ workingMemory );
+
+ // nothing should be asserted, as cheese1 is cheddar and we are filtering on stilton
+ assertEquals( 0,
+ sink.getAsserted().size() );
+
+ //Set cheese1 to stilton and it should now propagate
+ cheese1.setType( "stilton" );
+ from.modifyTuple( tuple1,
+ context,
+ workingMemory );
+ List asserted = sink.getAsserted();
+ assertEquals( 1,
+ asserted.size() );
+ ReteTuple tuple = (ReteTuple) ((Object[]) asserted.get( 0 ))[0];
+ assertSame( person1,
+ tuple.getFactHandles()[0].getObject() );
+ assertSame( cheese1,
+ tuple.getFactHandles()[1].getObject() );
+
+ cheese2.setType( "stilton" );
+ from.modifyTuple( tuple1,
+ context,
+ workingMemory );
+
+ // A modify when using from involves a retract and an assert - so make sure there was a retraction and no modify propagations
+ assertEquals( 0 , sink.getModified().size() );
+ assertEquals( 1, sink.getRetracted().size() );
+
+ assertEquals( 3,
+ asserted.size() );
+ tuple = (ReteTuple) ((Object[]) asserted.get( 1 ))[0];
+ assertSame( person1,
+ tuple.getFactHandles()[0].getObject() );
+ assertSame( cheese1,
+ tuple.getFactHandles()[1].getObject() );
+
+ tuple = (ReteTuple) ((Object[]) asserted.get( 2 ))[0];
+ assertSame( person1,
+ tuple.getFactHandles()[0].getObject() );
+ assertSame( cheese2,
+ tuple.getFactHandles()[1].getObject() );
+
+ // Double check the nodes memory
+ BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( from );
+ assertEquals( 1,
+ memory.getLeftTupleMemory().size() );
+ assertEquals( 0,
+ memory.getRightObjectMemory().size() );
+ assertEquals( 2,
+ tuple1.getTupleMatches().size() );
+ }
+
+ public static class MockDataProvider
+ implements
+ DataProvider {
+
+ private Collection collection;
+
+ public Declaration[] getRequiredDeclarations() {
+ return null;
+ }
+
+ public MockDataProvider(Collection collection) {
+ this.collection = collection;
+ }
+
+ public Iterator getResults(Tuple tuple) {
+ return this.collection.iterator();
+ }
+ }
+
+ public static class Person {
+ private String name;
+ private int age;
+
+ public Person(String name,
+ int age) {
+ super();
+ this.name = name;
+ this.age = age;
+ }
+
+ public int getAge() {
+ return age;
+ }
+
+ public String getName() {
+ return name;
+ }
+ }
+}
diff --git a/drools-core/src/test/java/org/drools/reteoo/MockTupleSource.java b/drools-core/src/test/java/org/drools/reteoo/MockTupleSource.java
index 4a7f82b6189..2d0b59099d4 100644
--- a/drools-core/src/test/java/org/drools/reteoo/MockTupleSource.java
+++ b/drools-core/src/test/java/org/drools/reteoo/MockTupleSource.java
@@ -38,7 +38,6 @@ public MockTupleSource(final int id) {
public void attach() {
this.attached++;
-
}
public int getAttached() {
diff --git a/drools-core/src/test/java/org/drools/rule/FieldConstraintTest.java b/drools-core/src/test/java/org/drools/rule/FieldConstraintTest.java
index 0e7eafd6cb4..bdba597c349 100644
--- a/drools-core/src/test/java/org/drools/rule/FieldConstraintTest.java
+++ b/drools-core/src/test/java/org/drools/rule/FieldConstraintTest.java
@@ -78,7 +78,7 @@ public void testLiteralConstraint() throws IntrospectionException {
final InternalFactHandle cheddarHandle = (InternalFactHandle) workingMemory.assertObject( cheddar );
// check constraint
- assertTrue( constraint.isAllowed( cheddarHandle,
+ assertTrue( constraint.isAllowed( cheddarHandle.getObject(),
null,
workingMemory ) );
@@ -88,7 +88,7 @@ public void testLiteralConstraint() throws IntrospectionException {
final InternalFactHandle stiltonHandle = (InternalFactHandle) workingMemory.assertObject( stilton );
// check constraint
- assertFalse( constraint.isAllowed( stiltonHandle,
+ assertFalse( constraint.isAllowed( stiltonHandle.getObject(),
null,
workingMemory ) );
}
@@ -131,13 +131,13 @@ public void testPredicateConstraint() throws IntrospectionException {
*/
private static final long serialVersionUID = -7805842671538257493L;
- public boolean evaluate(Tuple tuple,
- FactHandle factHandle,
+ public boolean evaluate(Object object,
+ Tuple tuple,
Declaration declaration,
Declaration[] declarations,
WorkingMemory workingMemory) {
int price1 = ((Integer) declarations[0].getValue( workingMemory.getObject( tuple.get( declarations[0] ) ) )).intValue();
- int price2 = ((Integer) declaration.getValue( workingMemory.getObject( factHandle ) )).intValue();
+ int price2 = ((Integer) declaration.getValue( object )).intValue();
return (price2 == (price1 * 2));
@@ -160,7 +160,7 @@ public boolean evaluate(Tuple tuple,
tuple = new InstrumentedReteTuple( tuple,
f1 );
- assertTrue( constraint1.isAllowed( f1,
+ assertTrue( constraint1.isAllowed( f1.getObject(),
tuple,
workingMemory ) );
}
@@ -228,11 +228,11 @@ public Object evaluate(Tuple tuple, // ?price
tuple = new InstrumentedReteTuple( tuple,
f1 );
- assertTrue( constraint1.isAllowed( f1,
+ assertTrue( constraint1.isAllowed( f1.getObject(),
tuple,
workingMemory ) );
- assertFalse( constraint2.isAllowed( f1,
+ assertFalse( constraint2.isAllowed( f1.getObject(),
tuple,
workingMemory ) );
@@ -241,7 +241,7 @@ public Object evaluate(Tuple tuple, // ?price
final InternalFactHandle f2 = (InternalFactHandle) workingMemory.assertObject( cheddar2 );
- assertTrue( constraint2.isAllowed( f2,
+ assertTrue( constraint2.isAllowed( f2.getObject(),
tuple,
workingMemory ) );
}
diff --git a/drools-core/src/test/java/org/drools/spi/MockConstraint.java b/drools-core/src/test/java/org/drools/spi/MockConstraint.java
index c9fd203b045..7cc2f2a23e6 100644
--- a/drools-core/src/test/java/org/drools/spi/MockConstraint.java
+++ b/drools-core/src/test/java/org/drools/spi/MockConstraint.java
@@ -17,7 +17,6 @@
*/
import org.drools.WorkingMemory;
-import org.drools.common.InternalFactHandle;
import org.drools.rule.Declaration;
public class MockConstraint
@@ -33,7 +32,7 @@ public class MockConstraint
public boolean isAllowed = true;
- public boolean isAllowed(final InternalFactHandle handle,
+ public boolean isAllowed(final Object object,
final Tuple tuple,
final WorkingMemory workingMemory) {
return this.isAllowed;
|
71566c837be621b249f47f3d0b525e744238bf02
|
drools
|
Added ScannerChangeSetTest which scans CSV resource- file for changes.--
|
p
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/core/util/FileManager.java b/drools-core/src/main/java/org/drools/core/util/FileManager.java
index 47675356b61..3570356da39 100644
--- a/drools-core/src/main/java/org/drools/core/util/FileManager.java
+++ b/drools-core/src/main/java/org/drools/core/util/FileManager.java
@@ -19,9 +19,11 @@
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
+import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
+import java.io.InputStreamReader;
import java.io.StringReader;
import java.util.UUID;
@@ -192,5 +194,19 @@ public File write(String path,
return f;
}
+
+ public String readInputStreamReaderAsString(InputStreamReader in) throws IOException {
+ StringBuffer fileData = new StringBuffer(1000);
+ BufferedReader reader = new BufferedReader(in);
+ char[] buf = new char[1024];
+ int numRead = 0;
+ while ((numRead = reader.read(buf)) != -1) {
+ String readData = String.valueOf(buf, 0, numRead);
+ fileData.append(readData);
+ buf = new char[1024];
+ }
+ reader.close();
+ return fileData.toString();
+ }
}
diff --git a/drools-decisiontables/src/test/java/org/drools/decisiontable/ScannerChangeSetTest.java b/drools-decisiontables/src/test/java/org/drools/decisiontable/ScannerChangeSetTest.java
new file mode 100644
index 00000000000..08f3d61994a
--- /dev/null
+++ b/drools-decisiontables/src/test/java/org/drools/decisiontable/ScannerChangeSetTest.java
@@ -0,0 +1,94 @@
+package org.drools.decisiontable;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.drools.KnowledgeBase;
+import org.drools.agent.KnowledgeAgent;
+import org.drools.agent.KnowledgeAgentFactory;
+import org.drools.core.util.FileManager;
+import org.drools.io.ResourceChangeScannerConfiguration;
+import org.drools.io.ResourceFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ScannerChangeSetTest {
+
+ FileManager fileManager;
+
+ @Before
+ public void setUp() throws Exception {
+ fileManager = new FileManager();
+ fileManager.setUp();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ fileManager.tearDown();
+ }
+
+ @Test
+ public void testCSVByResourceChangeScanner() throws InterruptedException,
+ IOException {
+
+ // load contents of resource decision tables
+ String first = fileManager
+ .readInputStreamReaderAsString(new InputStreamReader(getClass()
+ .getResourceAsStream("changeSetTestCSV.csv")));
+ String second = fileManager
+ .readInputStreamReaderAsString(new InputStreamReader(getClass()
+ .getResourceAsStream("changeSetTestCSV2.csv")));
+ System.out.println(first);
+ System.out.println(second);
+
+ // write first version of the decision table rules
+ File file = new File(
+ "src/test/resources/org/drools/decisiontable/scannerChangeSetTestCSV.csv");
+ file.delete();
+ fileManager.write(file, first);
+ Thread.sleep(1100);
+
+ // start scanning service with interval 1s
+ ResourceChangeScannerConfiguration config = ResourceFactory
+ .getResourceChangeScannerService()
+ .newResourceChangeScannerConfiguration();
+ config.setProperty("drools.resource.scanner.interval", "1");
+ ResourceFactory.getResourceChangeScannerService().configure(config);
+ ResourceFactory.getResourceChangeNotifierService().start();
+ ResourceFactory.getResourceChangeScannerService().start();
+
+ // load knowledge base via knowledge agent
+ KnowledgeAgent kagent = KnowledgeAgentFactory
+ .newKnowledgeAgent("csv agent");
+ kagent.applyChangeSet(ResourceFactory.newClassPathResource(
+ "scannerChangeSetTestCSV.xml", getClass()));
+ KnowledgeBase kbase = kagent.getKnowledgeBase();
+
+ assertEquals(1, kbase.getKnowledgePackages().size());
+ assertEquals(3, kbase.getKnowledgePackages().iterator().next()
+ .getRules().size());
+
+ // after some waiting we change number of rules in decision table,
+ // scanner should notice the change
+ Thread.sleep(1100);
+ file.delete();
+ fileManager.write(file, second);
+ Thread.sleep(1100);
+
+ try {
+ kbase = kagent.getKnowledgeBase();
+ // fails here - see surefire report, knowledge agent fails to load the change
+ assertEquals(1, kbase.getKnowledgePackages().size());
+ assertEquals(2, kbase.getKnowledgePackages().iterator().next()
+ .getRules().size());
+ } finally {
+ ResourceFactory.getResourceChangeNotifierService().stop();
+ ResourceFactory.getResourceChangeScannerService().stop();
+ file.delete();
+ }
+ }
+}
diff --git a/drools-decisiontables/src/test/resources/org/drools/decisiontable/changeSetTestCSV2.csv b/drools-decisiontables/src/test/resources/org/drools/decisiontable/changeSetTestCSV2.csv
new file mode 100644
index 00000000000..8008cef8f5c
--- /dev/null
+++ b/drools-decisiontables/src/test/resources/org/drools/decisiontable/changeSetTestCSV2.csv
@@ -0,0 +1,12 @@
+,
+"RuleSet","org.drools.decisiontable"
+"Import","org.drools.decisiontable.Person"
+"Notes",
+,
+"RuleTable Age change",
+"CONDITION","ACTION"
+"person:Person","person"
+"age == $param","setAge($param)"
+"age","new Age"
+0,1
+1,2
diff --git a/drools-decisiontables/src/test/resources/org/drools/decisiontable/scannerChangeSetTestCSV.xml b/drools-decisiontables/src/test/resources/org/drools/decisiontable/scannerChangeSetTestCSV.xml
new file mode 100644
index 00000000000..e6cb761a1e9
--- /dev/null
+++ b/drools-decisiontables/src/test/resources/org/drools/decisiontable/scannerChangeSetTestCSV.xml
@@ -0,0 +1,10 @@
+<change-set xmlns='http://drools.org/drools-5.0/change-set'
+ xmlns:xs='http://www.w3.org/2001/XMLSchema-instance'
+ xs:schemaLocation='http://drools.org/drools-5.0/change-set http://anonsvn.jboss.org/repos/labs/labs/jbossrules/trunk/drools-api/src/main/resources/change-set-1.0.0.xsd'>
+
+ <add>
+ <resource source="classpath:org/drools/decisiontable/scannerChangeSetTestCSV.csv" type="DTABLE">
+ <decisiontable-conf input-type="CSV" worksheet-name="Tables"/>
+ </resource>
+ </add>
+</change-set>
|
e2ffe19d36e25a9d208e53a534f878e8605ed5ab
|
intellij-community
|
cleanup--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/java/compiler/impl/src/com/intellij/compiler/server/BuildManager.java b/java/compiler/impl/src/com/intellij/compiler/server/BuildManager.java
index a34084fa28eaf..6fb2c80e3d801 100644
--- a/java/compiler/impl/src/com/intellij/compiler/server/BuildManager.java
+++ b/java/compiler/impl/src/com/intellij/compiler/server/BuildManager.java
@@ -137,7 +137,7 @@ public Boolean fun(String s) {
private final CompileServerClasspathManager myClasspathManager = new CompileServerClasspathManager();
private final Executor myPooledThreadExecutor = new Executor() {
@Override
- public void execute(Runnable command) {
+ public void execute(@NotNull Runnable command) {
ApplicationManager.getApplication().executeOnPooledThread(command);
}
};
@@ -478,7 +478,7 @@ public void run() {
globals = buildGlobalSettings();
myGlobals = globals;
}
- CmdlineRemoteProto.Message.ControllerMessage.FSEvent currentFSChanges = null;
+ CmdlineRemoteProto.Message.ControllerMessage.FSEvent currentFSChanges;
final SequentialTaskExecutor projectTaskQueue;
synchronized (myProjectDataMap) {
ProjectData data = myProjectDataMap.get(projectPath);
@@ -761,21 +761,12 @@ private Process launchBuildProcess(Project project, final int port, final UUID s
cmdLine.addParameter("-D"+ GlobalOptions.HOSTNAME_OPTION + "=" + host);
// javac's VM should use the same default locale that IDEA uses in order for javac to print messages in 'correct' language
- final String lang = System.getProperty("user.language");
- if (lang != null) {
- //noinspection HardCodedStringLiteral
- cmdLine.addParameter("-Duser.language=" + lang);
- }
- final String country = System.getProperty("user.country");
- if (country != null) {
- //noinspection HardCodedStringLiteral
- cmdLine.addParameter("-Duser.country=" + country);
- }
- //noinspection HardCodedStringLiteral
- final String region = System.getProperty("user.region");
- if (region != null) {
- //noinspection HardCodedStringLiteral
- cmdLine.addParameter("-Duser.region=" + region);
+ String[] propertyNames = {"user.language", "user.country", "user.region"};
+ for (String name : propertyNames) {
+ final String value = System.getProperty(name);
+ if (value != null) {
+ cmdLine.addParameter("-D" + name + "=" + value);
+ }
}
cmdLine.addParameter("-classpath");
diff --git a/jps/jps-builders/src/org/jetbrains/jps/api/CmdlineProtoUtil.java b/jps/jps-builders/src/org/jetbrains/jps/api/CmdlineProtoUtil.java
index 38d0aa113a458..ac843603c68b7 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/api/CmdlineProtoUtil.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/api/CmdlineProtoUtil.java
@@ -22,7 +22,7 @@ public static CmdlineRemoteProto.Message.ControllerMessage createMakeRequest(Str
List<TargetTypeBuildScope> scopes,
final Map<String, String> userData,
final CmdlineRemoteProto.Message.ControllerMessage.GlobalSettings globals,
- final CmdlineRemoteProto.Message.ControllerMessage.FSEvent event) {
+ final @Nullable CmdlineRemoteProto.Message.ControllerMessage.FSEvent event) {
return createBuildParametersMessage(CmdlineRemoteProto.Message.ControllerMessage.ParametersMessage.Type.MAKE, project, scopes,
userData, Collections.<String>emptyList(),
globals, event);
@@ -33,7 +33,7 @@ public static CmdlineRemoteProto.Message.ControllerMessage createForceCompileReq
Collection<String> paths,
final Map<String, String> userData,
final CmdlineRemoteProto.Message.ControllerMessage.GlobalSettings globals,
- final CmdlineRemoteProto.Message.ControllerMessage.FSEvent event) {
+ final @Nullable CmdlineRemoteProto.Message.ControllerMessage.FSEvent event) {
return createBuildParametersMessage(CmdlineRemoteProto.Message.ControllerMessage.ParametersMessage.Type.FORCED_COMPILATION, project,
scopes, userData, paths, globals, event);
}
@@ -63,7 +63,7 @@ public static TargetTypeBuildScope createAllTargetsScope(BuildTargetType<?> type
private static CmdlineRemoteProto.Message.ControllerMessage createBuildParametersMessage(CmdlineRemoteProto.Message.ControllerMessage.ParametersMessage.Type buildType,
String project,
- List<CmdlineRemoteProto.Message.ControllerMessage.ParametersMessage.TargetTypeBuildScope> scopes,
+ List<TargetTypeBuildScope> scopes,
Map<String, String> userData,
Collection<String> paths,
final CmdlineRemoteProto.Message.ControllerMessage.GlobalSettings globals,
@@ -99,7 +99,7 @@ public static CmdlineRemoteProto.Message.KeyValuePair createPair(String key, Str
}
- public static CmdlineRemoteProto.Message.Failure createFailure(String description, Throwable cause) {
+ public static CmdlineRemoteProto.Message.Failure createFailure(String description, @Nullable Throwable cause) {
final CmdlineRemoteProto.Message.Failure.Builder builder = CmdlineRemoteProto.Message.Failure.newBuilder();
builder.setDescription(description);
if (cause != null) {
|
2f40ce3e2a5e244315c16865967f58930f57cf7c
|
ReactiveX-RxJava
|
Added takeLast to Observable--
|
a
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/Observable.java b/rxjava-core/src/main/java/rx/Observable.java
index 0df09cb3c7..d234a73ba2 100644
--- a/rxjava-core/src/main/java/rx/Observable.java
+++ b/rxjava-core/src/main/java/rx/Observable.java
@@ -1347,6 +1347,22 @@ public static <T> Observable<T> take(final Observable<T> items, final int num) {
return _create(OperationTake.take(items, num));
}
+ /**
+ * Returns an Observable that emits the last <code>count</code> items emitted by the source
+ * Observable.
+ *
+ * @param items
+ * the source Observable
+ * @param count
+ * the number of items from the end of the sequence emitted by the source
+ * Observable to emit
+ * @return an Observable that only emits the last <code>count</code> items emitted by the source
+ * Observable
+ */
+ public static <T> Observable<T> takeLast(final Observable<T> items, final int count) {
+ return _create(OperationTakeLast.takeLast(items, count));
+ }
+
/**
* Returns an Observable that emits a single item, a list composed of all the items emitted by
* the source Observable.
@@ -2235,6 +2251,20 @@ public Observable<T> take(final int num) {
return take(this, num);
}
+ /**
+ * Returns an Observable that emits the last <code>count</code> items emitted by the source
+ * Observable.
+ *
+ * @param count
+ * the number of items from the end of the sequence emitted by the source
+ * Observable to emit
+ * @return an Observable that only emits the last <code>count</code> items emitted by the source
+ * Observable
+ */
+ public Observable<T> takeLast(final int count) {
+ return takeLast(this, count);
+ }
+
/**
* Returns an Observable that emits a single item, a list composed of all the items emitted by
* the source Observable.
diff --git a/rxjava-core/src/main/java/rx/operators/OperationTakeLast.java b/rxjava-core/src/main/java/rx/operators/OperationTakeLast.java
index a3116dea58..57e336739e 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationTakeLast.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationTakeLast.java
@@ -23,13 +23,14 @@
import rx.util.functions.Func1;
import java.util.Iterator;
-import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.LinkedBlockingDeque;
-import java.util.concurrent.atomic.AtomicInteger;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
+/**
+ * Returns a specified number of contiguous elements from the end of an observable sequence.
+ */
public final class OperationTakeLast {
public static <T> Func1<Observer<T>, Subscription> takeLast(final Observable<T> items, final int count) {
|
d1c59faff700059d52ef350c3e7741dd5abcb65a
|
drools
|
JBRULES-1498 Thead safe partitioning of- WorkingMemory entry points--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@18794 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
p
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/StreamsTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/StreamsTest.java
index e05ddcb00c0..75742021cc5 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/StreamsTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/StreamsTest.java
@@ -24,7 +24,7 @@
import java.util.List;
import org.drools.ClockType;
-import org.drools.EntryPointInterface;
+import org.drools.WorkingMemoryEntryPoint;
import org.drools.RuleBase;
import org.drools.RuleBaseConfiguration;
import org.drools.RuleBaseFactory;
@@ -159,7 +159,7 @@ public void testEventAssertion() throws Exception {
50,
System.currentTimeMillis() );
- EntryPointInterface entry = wm.getEntryPoint( "StockStream" );
+ WorkingMemoryEntryPoint entry = wm.getWorkingMemoryEntryPoint( "StockStream" );
InternalFactHandle handle5 = (InternalFactHandle) entry.insert( tick5 );
InternalFactHandle handle6 = (InternalFactHandle) entry.insert( tick6 );
diff --git a/drools-compiler/src/test/java/org/drools/testframework/MockWorkingMemory.java b/drools-compiler/src/test/java/org/drools/testframework/MockWorkingMemory.java
index 127b5829f0c..a3a5e83ff4e 100644
--- a/drools-compiler/src/test/java/org/drools/testframework/MockWorkingMemory.java
+++ b/drools-compiler/src/test/java/org/drools/testframework/MockWorkingMemory.java
@@ -9,7 +9,7 @@
import java.util.concurrent.locks.Lock;
import org.drools.Agenda;
-import org.drools.EntryPointInterface;
+import org.drools.WorkingMemoryEntryPoint;
import org.drools.FactException;
import org.drools.FactHandle;
import org.drools.ObjectFilter;
@@ -458,7 +458,7 @@ public Map<Object, ObjectTypeConf> getObjectTypeConfMap(EntryPoint entryPoint) {
return null;
}
- public EntryPointInterface getEntryPoint(String id) {
+ public WorkingMemoryEntryPoint getWorkingMemoryEntryPoint(String id) {
// TODO Auto-generated method stub
return null;
}
|
f0f7133a3ae5cff39e4f13643c346fe3b234a8ac
|
camel
|
CAMEL-751 fixed the spring configuration url- error--git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@679379 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/components/camel-spring/src/test/java/org/apache/camel/spring/config/ErrorHandlerTest.java b/components/camel-spring/src/test/java/org/apache/camel/spring/config/ErrorHandlerTest.java
index a0b3593b42a5c..1e9da2e0d1b90 100644
--- a/components/camel-spring/src/test/java/org/apache/camel/spring/config/ErrorHandlerTest.java
+++ b/components/camel-spring/src/test/java/org/apache/camel/spring/config/ErrorHandlerTest.java
@@ -30,7 +30,7 @@
public class ErrorHandlerTest extends SpringTestSupport {
protected ClassPathXmlApplicationContext createApplicationContext() {
- return new ClassPathXmlApplicationContext("org/apache/camel/spring/errorHandler.xml");
+ return new ClassPathXmlApplicationContext("org/apache/camel/spring/config/errorHandler.xml");
}
public void testEndpointConfiguration() throws Exception {
|
86f43189eb607c39694441af9799f93f3bf5fc4f
|
camel
|
CAMEL-2180: Do not use copy of exchange when- processing in doCatch.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@881175 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/processor/TryProcessor.java b/camel-core/src/main/java/org/apache/camel/processor/TryProcessor.java
index 8cfdae8c3dc9f..594eba6c227a6 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/TryProcessor.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/TryProcessor.java
@@ -72,7 +72,7 @@ public void process(Exchange exchange) throws Exception {
exchange.setException(e);
}
- // handle any exception occured during the try processor
+ // handle any exception occurred during the try processor
try {
if (e != null) {
handleException(exchange, e);
@@ -104,17 +104,14 @@ protected void handleException(Exchange exchange, Throwable e) throws Exception
LOG.trace("This TryProcessor catches the exception: " + caught.getClass().getName() + " caused by: " + e.getMessage());
}
- // TODO: No need to make a copy
- // lets attach the exception to the exchange
- Exchange localExchange = exchange.copy();
-
- localExchange.setProperty(Exchange.EXCEPTION_CAUGHT, caught);
// give the rest of the pipeline another chance
- localExchange.setException(null);
+ exchange.setProperty(Exchange.EXCEPTION_CAUGHT, caught);
+ exchange.setException(null);
// do not catch any exception here, let it propagate up
- catchClause.process(localExchange);
+ catchClause.process(exchange);
+ // is the exception handled by the catch clause
boolean handled = catchClause.handles(exchange);
if (LOG.isDebugEnabled()) {
@@ -124,13 +121,11 @@ protected void handleException(Exchange exchange, Throwable e) throws Exception
if (!handled) {
// put exception back as it was not handled
- if (localExchange.getException() == null) {
- localExchange.setException(localExchange.getProperty(Exchange.EXCEPTION_CAUGHT, Exception.class));
+ if (exchange.getException() == null) {
+ exchange.setException(exchange.getProperty(Exchange.EXCEPTION_CAUGHT, Exception.class));
}
}
- // copy result back to the original exchange
- ExchangeHelper.copyResults(exchange, localExchange);
return;
}
}
|
c4691583426b92eaee0c64deaa088301acdce4c2
|
drools
|
JBRULES-1805: fixing multithread bug--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@23501 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/base/EnabledBoolean.java b/drools-core/src/main/java/org/drools/base/EnabledBoolean.java
new file mode 100644
index 00000000000..e375d881169
--- /dev/null
+++ b/drools-core/src/main/java/org/drools/base/EnabledBoolean.java
@@ -0,0 +1,52 @@
+package org.drools.base;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+import org.drools.WorkingMemory;
+import org.drools.spi.Enabled;
+import org.drools.spi.Tuple;
+
+public class EnabledBoolean
+ implements
+ Enabled,
+ Externalizable {
+
+ /**
+ *
+ */
+ private static final long serialVersionUID = 400L;
+
+ public static final Enabled ENABLED_TRUE = new EnabledBoolean( true );
+ public static final Enabled ENABLED_FALSE = new EnabledBoolean( false );
+
+ private boolean value;
+
+ public EnabledBoolean() {
+ }
+
+ public EnabledBoolean(boolean value) {
+ this.value = value;
+ }
+
+ public void readExternal(ObjectInput in) throws IOException,
+ ClassNotFoundException {
+ value = in.readBoolean();
+ }
+
+ public void writeExternal(ObjectOutput out) throws IOException {
+ out.writeBoolean( value );
+ }
+
+ public boolean getValue(final Tuple tuple,
+ final WorkingMemory workingMemory) {
+ return this.value;
+ }
+
+ public String toString() {
+ return String.valueOf( this.value );
+ }
+
+}
diff --git a/drools-core/src/main/java/org/drools/base/mvel/MVELEnabledExpression.java b/drools-core/src/main/java/org/drools/base/mvel/MVELEnabledExpression.java
new file mode 100644
index 00000000000..606a1941e34
--- /dev/null
+++ b/drools-core/src/main/java/org/drools/base/mvel/MVELEnabledExpression.java
@@ -0,0 +1,76 @@
+package org.drools.base.mvel;
+
+import java.io.Externalizable;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.io.Serializable;
+
+import org.drools.WorkingMemory;
+import org.drools.rule.MVELDialectRuntimeData;
+import org.drools.rule.Package;
+import org.drools.spi.Enabled;
+import org.drools.spi.Tuple;
+import org.mvel.MVEL;
+
+public class MVELEnabledExpression
+ implements
+ Enabled,
+ MVELCompileable,
+ Externalizable {
+
+ private static final long serialVersionUID = 400L;
+
+ private MVELCompilationUnit unit;
+ private String id;
+
+ private Serializable expr;
+ private DroolsMVELFactory prototype;
+
+ public MVELEnabledExpression() {
+ }
+
+ public MVELEnabledExpression(final MVELCompilationUnit unit,
+ final String id) {
+ this.unit = unit;
+ this.id = id;
+ }
+
+ public void readExternal(ObjectInput in) throws IOException,
+ ClassNotFoundException {
+ unit = (MVELCompilationUnit) in.readObject();
+ id = in.readUTF();
+ }
+
+ public void writeExternal(ObjectOutput out) throws IOException {
+ out.writeObject( unit );
+ out.writeUTF( id );
+ }
+
+ public void compile(ClassLoader classLoader) {
+ expr = unit.getCompiledExpression( classLoader );
+ prototype = unit.getFactory();
+ }
+
+ public boolean getValue(final Tuple tuple,
+ final WorkingMemory workingMemory) {
+ // it must be cloned for multi-thread safety
+ DroolsMVELFactory factory = (DroolsMVELFactory) this.prototype.clone();
+ factory.setContext( tuple,
+ null,
+ null,
+ workingMemory,
+ null );
+
+ // do we have any functions for this namespace?
+ Package pkg = workingMemory.getRuleBase().getPackage( "MAIN" );
+ if ( pkg != null ) {
+ MVELDialectRuntimeData data = (MVELDialectRuntimeData) pkg.getDialectRuntimeRegistry().getDialectData( this.id );
+ factory.setNextFactory( data.getFunctionFactory() );
+ }
+
+ return ((Boolean) MVEL.executeExpression( this.expr,
+ factory )).booleanValue();
+ }
+
+}
diff --git a/drools-core/src/main/java/org/drools/base/mvel/MVELSalienceExpression.java b/drools-core/src/main/java/org/drools/base/mvel/MVELSalienceExpression.java
index 30ac7c5abf1..e61f9cb0eb2 100644
--- a/drools-core/src/main/java/org/drools/base/mvel/MVELSalienceExpression.java
+++ b/drools-core/src/main/java/org/drools/base/mvel/MVELSalienceExpression.java
@@ -1,10 +1,10 @@
package org.drools.base.mvel;
-import java.io.Serializable;
import java.io.Externalizable;
import java.io.IOException;
-import java.io.ObjectOutput;
import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.io.Serializable;
import org.drools.WorkingMemory;
import org.drools.rule.MVELDialectRuntimeData;
@@ -25,8 +25,7 @@ public class MVELSalienceExpression
private String id;
private Serializable expr;
- // @FIXME this factory is not threadsave for rulebases
- private DroolsMVELFactory factory;
+ private DroolsMVELFactory prototype;
public MVELSalienceExpression() {
}
@@ -50,16 +49,17 @@ public void writeExternal(ObjectOutput out) throws IOException {
public void compile(ClassLoader classLoader) {
expr = unit.getCompiledExpression( classLoader );
- factory = unit.getFactory();
+ prototype = unit.getFactory();
}
public int getValue(final Tuple tuple,
final WorkingMemory workingMemory) {
- this.factory.setContext( tuple,
- null,
- null,
- workingMemory,
- null );
+ DroolsMVELFactory factory = (DroolsMVELFactory) this.prototype.clone();
+ factory.setContext( tuple,
+ null,
+ null,
+ workingMemory,
+ null );
// do we have any functions for this namespace?
Package pkg = workingMemory.getRuleBase().getPackage( "MAIN" );
@@ -69,7 +69,7 @@ public int getValue(final Tuple tuple,
}
return ((Number) MVEL.executeExpression( this.expr,
- this.factory )).intValue();
+ factory )).intValue();
}
}
|
70d5d2c168bd477e3b8330fd7802b280d1f72b8e
|
camel
|
Set the isCreateCamelContextPerClass on tests- that can pass with it to speed up the tests--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1152396 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCXFGreeterRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCXFGreeterRouterTest.java
index 54b4ae4946afe..0e1c4525d2cfc 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCXFGreeterRouterTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCXFGreeterRouterTest.java
@@ -58,7 +58,6 @@ public static int getPort2() {
return CXFTestSupport.getPort2();
}
-
protected abstract ClassPathXmlApplicationContext createApplicationContext();
@Before
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCxfWsdlFirstTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCxfWsdlFirstTest.java
index 5c897860f286e..6bbf10faf28e6 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCxfWsdlFirstTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/AbstractCxfWsdlFirstTest.java
@@ -50,6 +50,10 @@ public static int getPort2() {
return CXFTestSupport.getPort2();
}
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
@Test
public void testInvokingServiceFromCXFClient() throws Exception {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeMultiPartNoSpringTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeMultiPartNoSpringTest.java
index b514ad825954d..ded24d8c76247 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeMultiPartNoSpringTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeMultiPartNoSpringTest.java
@@ -48,6 +48,11 @@ public class CXFWsdlOnlyPayloadModeMultiPartNoSpringTest extends CamelTestSuppor
+ "/CXFWsdlOnlyPayloadModeMultiPartNoSpringTest/PersonMultiPart";
protected Endpoint endpoint;
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
@Before
public void startService() {
endpoint = Endpoint.publish(SERVICE_ADDRESS, new PersonMultiPartImpl());
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringSoap12Test.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringSoap12Test.java
index d89d35da58aa0..c70f8b6cf0f00 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringSoap12Test.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringSoap12Test.java
@@ -25,7 +25,11 @@
public class CXFWsdlOnlyPayloadModeNoSpringSoap12Test extends CXFWsdlOnlyPayloadModeNoSpringTest {
-
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
@Before
public void startService() {
endpoint = Endpoint.publish("http://localhost:" + port1 + "/"
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringTest.java
index 0e785aa9f3d74..80cb403d02d21 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyPayloadModeNoSpringTest.java
@@ -52,6 +52,11 @@ public class CXFWsdlOnlyPayloadModeNoSpringTest extends CamelTestSupport {
protected int port1 = CXFTestSupport.getPort1();
protected int port2 = CXFTestSupport.getPort2();
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
@Before
public void startService() {
endpoint = Endpoint.publish("http://localhost:" + port1 + "/" + getClass().getSimpleName()
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java
index 5f4730a2ffdb7..e1060f17c7daf 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CXFWsdlOnlyTest.java
@@ -45,6 +45,9 @@ public class CXFWsdlOnlyTest extends CamelSpringTestSupport {
private static int port3 = CXFTestSupport.getPort3();
private static int port4 = CXFTestSupport.getPort4();
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
protected ClassPathXmlApplicationContext createApplicationContext() {
System.setProperty("CXFWsdlOnlyTest.port1", Integer.toString(port1));
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerMessageTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerMessageTest.java
index b21895182a9f0..bf8a8012d4590 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerMessageTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerMessageTest.java
@@ -47,6 +47,10 @@ public class CxfConsumerMessageTest extends CamelTestSupport {
protected final String simpleEndpointURI = "cxf://" + simpleEndpointAddress
+ "?serviceClass=org.apache.camel.component.cxf.HelloService";
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerPayloadFaultTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerPayloadFaultTest.java
index 8686abb06e8ac..b1fdb15996919 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerPayloadFaultTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerPayloadFaultTest.java
@@ -65,6 +65,10 @@ public class CxfConsumerPayloadFaultTest extends CamelTestSupport {
protected final String fromURI = "cxf://" + serviceAddress + "?"
+ PORT_NAME_PROP + "&" + SERVICE_NAME_PROP + "&" + WSDL_URL_PROP + "&dataFormat=" + DataFormat.PAYLOAD;
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerProviderTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerProviderTest.java
index 76e6eaa11be83..64a932d1f28ea 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerProviderTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerProviderTest.java
@@ -49,6 +49,10 @@ public class CxfConsumerProviderTest extends CamelTestSupport {
+ "?serviceClass=org.apache.camel.component.cxf.ServiceProvider";
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerResponseTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerResponseTest.java
index 293957ccce39e..7b89881c07e20 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerResponseTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerResponseTest.java
@@ -53,6 +53,10 @@ public class CxfConsumerResponseTest extends CamelTestSupport {
+ "&publishedEndpointUrl=http://www.simple.com/services/test";
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
// START SNIPPET: example
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerTest.java
index d87323e65e403..e096593b142a5 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfConsumerTest.java
@@ -51,7 +51,12 @@ public class CxfConsumerTest extends CamelTestSupport {
private static final String ECHO_OPERATION = "echo";
private static final String ECHO_BOOLEAN_OPERATION = "echoBoolean";
private static final String TEST_MESSAGE = "Hello World!";
-
+
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
// START SNIPPET: example
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomerStartStopTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomerStartStopTest.java
index 66986987d7dbd..25543fb4a9d3e 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomerStartStopTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomerStartStopTest.java
@@ -32,7 +32,10 @@
@org.junit.Ignore
public class CxfCustomerStartStopTest extends Assert {
static final int PORT1 = CXFTestSupport.getPort1();
- static final int PORT2 = CXFTestSupport.getPort1();
+ static final int PORT2 = CXFTestSupport.getPort1();
+
+
+
@Test
public void startAndStopService() throws Exception {
CamelContext context = new DefaultCamelContext();
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomizedExceptionTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomizedExceptionTest.java
index 3d32fc1c8d2a7..e67a734070d7f 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomizedExceptionTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfCustomizedExceptionTest.java
@@ -69,6 +69,10 @@ public class CxfCustomizedExceptionTest extends CamelTestSupport {
private Bus bus;
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
@Override
@Before
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfDispatchTestSupport.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfDispatchTestSupport.java
index cf85aa427887f..7812026967f45 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfDispatchTestSupport.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfDispatchTestSupport.java
@@ -52,6 +52,10 @@ public abstract class CxfDispatchTestSupport extends CamelSpringTestSupport {
protected Endpoint endpoint;
private int port = CXFTestSupport.getPort1();
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
@Before
public void startService() {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java
index 4b72cd6993c2b..b9e92eb47bf65 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfJavaOnlyPayloadModeTest.java
@@ -39,6 +39,10 @@ public class CxfJavaOnlyPayloadModeTest extends CamelTestSupport {
+ "&portName={http://camel.apache.org/wsdl-first}soap"
+ "&dataFormat=PAYLOAD"
+ "&properties.exceptionMessageCauseEnabled=true&properties.faultStackTraceEnabled=true";
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
@Test
public void testCxfJavaOnly() throws Exception {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfMixedModeRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfMixedModeRouterTest.java
index 9dce087c756a9..906cc0f4de7fb 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfMixedModeRouterTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfMixedModeRouterTest.java
@@ -52,7 +52,11 @@ public class CxfMixedModeRouterTest extends CamelTestSupport {
private String routerEndpointURI = "cxf://" + ROUTER_ADDRESS + "?" + SERVICE_CLASS + "&dataFormat=PAYLOAD";
private String serviceEndpointURI = "cxf://" + SERVICE_ADDRESS + "?" + SERVICE_CLASS + "&dataFormat=POJO";
-
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
@BeforeClass
public static void startService() {
//start a service
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfNonWrapperTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfNonWrapperTest.java
index ef033e3a5616a..74c02d60e155a 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfNonWrapperTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfNonWrapperTest.java
@@ -34,7 +34,11 @@
public class CxfNonWrapperTest extends CamelSpringTestSupport {
int port1 = CXFTestSupport.getPort1();
-
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
protected ClassPathXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/component/cxf/nonWrapperProcessor.xml");
}
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfPayLoadSoapHeaderTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfPayLoadSoapHeaderTest.java
index 073deff09346b..c95d33741dbc8 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfPayLoadSoapHeaderTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfPayLoadSoapHeaderTest.java
@@ -56,7 +56,11 @@ protected String getServiceEndpointURI() {
return "cxf:http://localhost:" + port2 + "/" + getClass().getSimpleName()
+ "/new_pizza_service/services/PizzaService?wsdlURL=classpath:pizza_service.wsdl&dataFormat=PAYLOAD";
}
-
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerProtocalHeaderTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerProtocalHeaderTest.java
index c48127dc68af9..257ca29af2856 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerProtocalHeaderTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerProtocalHeaderTest.java
@@ -38,6 +38,11 @@ public class CxfProducerProtocalHeaderTest extends CamelTestSupport {
+ "<return xmlns=\"http://cxf.component.camel.apache.org/\">echo Hello World!</return>"
+ "</ns1:echoResponse></soap:Body></soap:Envelope>";
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerRouterTest.java
index 10727974f50cc..f0db44c96dc09 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerRouterTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerRouterTest.java
@@ -51,6 +51,10 @@ public class CxfProducerRouterTest extends CamelTestSupport {
private static final String ECHO_OPERATION = "echo";
private static final String TEST_MESSAGE = "Hello World!";
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
@BeforeClass
public static void startServer() throws Exception {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousFalseTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousFalseTest.java
index 074b7dd967e7a..28633d96e1322 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousFalseTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousFalseTest.java
@@ -40,6 +40,10 @@ public class CxfProducerSynchronousFalseTest extends CamelTestSupport {
private String url = "cxf://" + SIMPLE_SERVER_ADDRESS
+ "?serviceClass=org.apache.camel.component.cxf.HelloService&dataFormat=MESSAGE&synchronous=false";
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
@BeforeClass
public static void startServer() throws Exception {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousTest.java
index e15338b2966ab..6f9760d4b3285 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerSynchronousTest.java
@@ -39,6 +39,10 @@ public class CxfProducerSynchronousTest extends CamelTestSupport {
private String url = "cxf://" + SIMPLE_SERVER_ADDRESS
+ "?serviceClass=org.apache.camel.component.cxf.HelloService&dataFormat=MESSAGE&synchronous=true";
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
@BeforeClass
public static void startServer() throws Exception {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerTest.java
index 7f5f0652ec96d..76da36e080567 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfProducerTest.java
@@ -67,7 +67,6 @@ protected String getWrongServerAddress() {
return "http://localhost:" + CXFTestSupport.getPort3() + "/" + getClass().getSimpleName() + "/test";
}
-
@Before
public void startService() throws Exception {
// start a simple front service
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfRawMessageRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfRawMessageRouterTest.java
index 40cb2d44f3443..9a21fa8c98de4 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfRawMessageRouterTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfRawMessageRouterTest.java
@@ -36,7 +36,11 @@ public void configure() {
}
};
}
-
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
@Test
public void testTheContentType() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSimpleRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSimpleRouterTest.java
index c9cad6c10711b..c8fae21f095fb 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSimpleRouterTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSimpleRouterTest.java
@@ -51,7 +51,11 @@ protected String getServiceAddress() {
protected void configureFactory(ServerFactoryBean svrBean) {
}
-
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
@Before
public void startService() {
//start a service
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSoapMessageProviderTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSoapMessageProviderTest.java
index 4049f62f3ff1a..0b53668bea282 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSoapMessageProviderTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSoapMessageProviderTest.java
@@ -38,6 +38,10 @@ public class CxfSoapMessageProviderTest extends CamelSpringTestSupport {
protected ClassPathXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/component/cxf/SoapMessageProviderContext.xml");
}
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
@Test
public void testSOAPMessageModeDocLit() throws Exception {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringCustomizedExceptionTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringCustomizedExceptionTest.java
index a39274bfe0406..c6e57a5e41403 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringCustomizedExceptionTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfSpringCustomizedExceptionTest.java
@@ -49,7 +49,11 @@ public class CxfSpringCustomizedExceptionTest extends CamelTestSupport {
// END SNIPPET: FaultDefine
}
-
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
@Before
public void setUp() throws Exception {
CXFTestSupport.getPort1();
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfTimeoutTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfTimeoutTest.java
index 8971deca12021..f57ded3e6427b 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfTimeoutTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfTimeoutTest.java
@@ -39,6 +39,11 @@ public class CxfTimeoutTest extends CamelSpringTestSupport {
protected static final String JAXWS_SERVER_ADDRESS
= "http://localhost:" + CXFTestSupport.getPort1() + "/CxfTimeoutTest/SoapContext/SoapPort";
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
+
@BeforeClass
public static void startService() {
Greeter implementor = new GreeterImplWithSleep();
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstPayloadModeTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstPayloadModeTest.java
index 764040443f881..cb1ee7710a9d8 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstPayloadModeTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstPayloadModeTest.java
@@ -34,6 +34,10 @@
public class CxfWsdlFirstPayloadModeTest extends AbstractCxfWsdlFirstTest {
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
@BeforeClass
public static void startService() {
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstProcessorTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstProcessorTest.java
index 0dcfc8e25cab8..64580aa5d5072 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstProcessorTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstProcessorTest.java
@@ -25,6 +25,10 @@
import org.springframework.context.support.ClassPathXmlApplicationContext;
public class CxfWsdlFirstProcessorTest extends AbstractCxfWsdlFirstTest {
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
protected ClassPathXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/component/cxf/WsdlFirstProcessor.xml");
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstTest.java
index 4fdf0390318c2..71237e359aaf5 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/CxfWsdlFirstTest.java
@@ -41,6 +41,10 @@
import org.springframework.context.support.ClassPathXmlApplicationContext;
public class CxfWsdlFirstTest extends AbstractCxfWsdlFirstTest {
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
protected ClassPathXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/component/cxf/WsdlFirstBeans.xml");
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/converter/CxfPayloadConverterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/converter/CxfPayloadConverterTest.java
index adc093a53375c..e5e915ca4c6e1 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/converter/CxfPayloadConverterTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/converter/CxfPayloadConverterTest.java
@@ -41,6 +41,10 @@ public class CxfPayloadConverterTest extends ExchangeTestSupport {
private CxfPayload<String[]> payload;
private CxfPayload<String[]> emptyPayload;
private FileInputStream inputStream;
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
@Override
@Before
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsProducerTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsProducerTest.java
index 00bb7c29633f0..e4fd5798a1f54 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsProducerTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsProducerTest.java
@@ -49,6 +49,10 @@ public void process(Exchange exchange) throws Exception {
exchange.getOut().setBody(inMessage.getHeader(Exchange.HTTP_QUERY, String.class));
}
}
+ @Override
+ public boolean isCreateCamelContextPerClass() {
+ return true;
+ }
public int getPort1() {
return port1;
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsRouterTest.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsRouterTest.java
index 3506b2d6fa37e..7527d4784b45c 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsRouterTest.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsRouterTest.java
@@ -21,6 +21,7 @@
import org.apache.camel.test.junit4.CamelSpringTestSupport;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
@@ -154,6 +155,9 @@ public void testPostConsumer() throws Exception {
assertEquals(200, response.getStatusLine().getStatusCode());
assertEquals("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?><Customer><id>124</id><name>Jack</name></Customer>",
EntityUtils.toString(response.getEntity()));
+
+ HttpDelete del = new HttpDelete("http://localhost:" + PORT0 + "/CxfRsRouterTest/route/customerservice/customers/124/");
+ httpclient.execute(del);
} finally {
httpclient.getConnectionManager().shutdown();
}
@@ -174,6 +178,9 @@ public void testPostConsumerUniqueResponseCode() throws Exception {
assertEquals(201, response.getStatusLine().getStatusCode());
assertEquals("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?><Customer><id>124</id><name>Jack</name></Customer>",
EntityUtils.toString(response.getEntity()));
+
+ HttpDelete del = new HttpDelete("http://localhost:" + PORT0 + "/CxfRsRouterTest/route/customerservice/customers/124/");
+ httpclient.execute(del);
} finally {
httpclient.getConnectionManager().shutdown();
}
diff --git a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/testbean/CustomerService.java b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/testbean/CustomerService.java
index 1dbdb72f4422e..c90b4bb1ed600 100644
--- a/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/testbean/CustomerService.java
+++ b/components/camel-cxf/src/test/java/org/apache/camel/component/cxf/jaxrs/testbean/CustomerService.java
@@ -120,7 +120,9 @@ public Response deleteCustomer(@PathParam("id") String id) {
} else {
r = Response.notModified().build();
}
-
+ if (idNumber == currentId) {
+ --currentId;
+ }
return r;
}
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/BrowsableQueueTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/BrowsableQueueTest.java
index 896977f5456ff..966b5a1631e32 100644
--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/BrowsableQueueTest.java
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/BrowsableQueueTest.java
@@ -24,11 +24,14 @@
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.test.junit4.CamelTestSupport;
+
+import org.junit.After;
+import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge;
+import static org.apache.camel.component.jms.JmsComponent.jmsComponent;
/**
* @version
@@ -41,16 +44,29 @@ public class BrowsableQueueTest extends CamelTestSupport {
protected int counter;
protected Object[] expectedBodies = {"body1", "body2"};
+ @Before
+ public void setUp() throws Exception {
+ long start = System.currentTimeMillis();
+ super.setUp();
+ System.out.println("Start: " + (System.currentTimeMillis() - start));
+ }
+ @After
+ public void tearDown() throws Exception {
+ long start = System.currentTimeMillis();
+ super.tearDown();
+ System.out.println("Stop: " + (System.currentTimeMillis() - start));
+ }
+
@Test
public void testSendMessagesThenBrowseQueue() throws Exception {
// send some messages
for (int i = 0; i < expectedBodies.length; i++) {
Object expectedBody = expectedBodies[i];
- template.sendBodyAndHeader("activemq:test.b", expectedBody, "counter", i);
+ template.sendBodyAndHeader("activemq:BrowsableQueueTest.b", expectedBody, "counter", i);
}
// now lets browse the queue
- JmsQueueEndpoint endpoint = getMandatoryEndpoint("activemq:test.b?maximumBrowseSize=6", JmsQueueEndpoint.class);
+ JmsQueueEndpoint endpoint = getMandatoryEndpoint("activemq:BrowsableQueueTest.b?maximumBrowseSize=6", JmsQueueEndpoint.class);
assertEquals(6, endpoint.getMaximumBrowseSize());
List<Exchange> list = endpoint.getExchanges();
LOG.debug("Received: " + list);
@@ -80,8 +96,8 @@ protected void sendExchange(final Object expectedBody) {
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
- camelContext.addComponent(componentName, jmsComponentAutoAcknowledge(connectionFactory));
+ JmsComponent comp = jmsComponent(CamelJmsTestHelper.getSharedConfig());
+ camelContext.addComponent(componentName, comp);
return camelContext;
}
@@ -89,7 +105,7 @@ protected CamelContext createCamelContext() throws Exception {
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
public void configure() throws Exception {
- from("activemq:test.a").to("activemq:test.b");
+ from("activemq:BrowsableQueueTest.a").to("activemq:BrowsableQueueTest.b");
}
};
}
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/ConsumeMessageConverterTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/ConsumeMessageConverterTest.java
index 2e06ccb8942a2..ac4fd990b7dd6 100644
--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/ConsumeMessageConverterTest.java
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/ConsumeMessageConverterTest.java
@@ -32,7 +32,7 @@
import org.springframework.jms.support.converter.MessageConversionException;
import org.springframework.jms.support.converter.MessageConverter;
-import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge;
+import static org.apache.camel.component.jms.JmsComponent.jmsComponent;
/**
* @version
@@ -49,8 +49,7 @@ protected JndiRegistry createRegistry() throws Exception {
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
- camelContext.addComponent("activemq", jmsComponentAutoAcknowledge(connectionFactory));
+ camelContext.addComponent("activemq", jmsComponent(CamelJmsTestHelper.getSharedConfig()));
return camelContext;
}
@@ -61,7 +60,7 @@ public void testTextMessage() throws Exception {
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(TextMessage.class);
- template.sendBody("activemq:queue:hello", "Hello World");
+ template.sendBody("activemq:queue:ConsumeMessageConverterTest.hello", "Hello World");
assertMockEndpointsSatisfied();
}
@@ -72,7 +71,7 @@ public void testBytesMessage() throws Exception {
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(BytesMessage.class);
- template.sendBody("activemq:queue:hello", "Hello World".getBytes());
+ template.sendBody("activemq:queue:ConsumeMessageConverterTest.hello", "Hello World".getBytes());
assertMockEndpointsSatisfied();
}
@@ -80,7 +79,7 @@ public void testBytesMessage() throws Exception {
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
public void configure() throws Exception {
- from("activemq:queue:hello?messageConverter=#myMessageConverter").to("mock:result");
+ from("activemq:queue:ConsumeMessageConverterTest.hello?messageConverter=#myMessageConverter").to("mock:result");
}
};
}
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/FileRouteToJmsToFileTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/FileRouteToJmsToFileTest.java
index c1b570225fa34..ca060781651f8 100644
--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/FileRouteToJmsToFileTest.java
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/FileRouteToJmsToFileTest.java
@@ -27,7 +27,7 @@
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
-import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge;
+import static org.apache.camel.component.jms.JmsComponent.jmsComponent;
/**
* Unit test that we can do file over JMS to file.
@@ -39,7 +39,7 @@ public class FileRouteToJmsToFileTest extends CamelTestSupport {
@Test
public void testRouteFileToFile() throws Exception {
deleteDirectory("target/file2file");
- NotifyBuilder notify = new NotifyBuilder(context).from("activemq:queue:hello").whenDone(1).create();
+ NotifyBuilder notify = new NotifyBuilder(context).from("activemq:queue:FileRouteToJmsToFileTest.hello").whenDone(1).create();
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
@@ -58,8 +58,7 @@ public void testRouteFileToFile() throws Exception {
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
- camelContext.addComponent(componentName, jmsComponentAutoAcknowledge(connectionFactory));
+ camelContext.addComponent(componentName, jmsComponent(CamelJmsTestHelper.getSharedConfig()));
return camelContext;
}
@@ -67,9 +66,9 @@ protected CamelContext createCamelContext() throws Exception {
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
public void configure() throws Exception {
- from("file://target/file2file/in").to("activemq:queue:hello");
+ from("file://target/file2file/in").to("activemq:queue:FileRouteToJmsToFileTest.hello");
- from("activemq:queue:hello").to("file://target/file2file/out", "mock:result");
+ from("activemq:queue:FileRouteToJmsToFileTest.hello").to("file://target/file2file/out", "mock:result");
}
};
}
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsAutoStartupTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsAutoStartupTest.java
index a62787c32009c..e643a301812d6 100644
--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsAutoStartupTest.java
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsAutoStartupTest.java
@@ -44,7 +44,7 @@ public void testAutoStartup() throws Exception {
// should be stopped by default
mock.expectedMessageCount(0);
- template.sendBody("activemq:queue:foo", "Hello World");
+ template.sendBody("activemq:queue:JmsAutoStartupTest.foo", "Hello World");
Thread.sleep(2000);
@@ -64,7 +64,7 @@ protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
- endpoint = context.getEndpoint("activemq:queue:foo?autoStartup=false", JmsEndpoint.class);
+ endpoint = context.getEndpoint("activemq:queue:JmsAutoStartupTest.foo?autoStartup=false", JmsEndpoint.class);
from(endpoint).to("mock:result");
}
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsBatchResequencerJMSPriorityTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsBatchResequencerJMSPriorityTest.java
index a86c1e38dceb0..2460e0265bc51 100644
--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsBatchResequencerJMSPriorityTest.java
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsBatchResequencerJMSPriorityTest.java
@@ -40,14 +40,14 @@ public void testBatchResequencerJMSPriority() throws Exception {
mock.expectedBodiesReceived("G", "A", "B", "E", "H", "C", "D", "F");
// must use preserveMessageQos=true to be able to specify the JMSPriority to be used
- template.sendBodyAndHeader("jms:queue:foo?preserveMessageQos=true", "A", "JMSPriority", 6);
- template.sendBodyAndHeader("jms:queue:foo?preserveMessageQos=true", "B", "JMSPriority", 6);
- template.sendBodyAndHeader("jms:queue:foo?preserveMessageQos=true", "C", "JMSPriority", 4);
- template.sendBodyAndHeader("jms:queue:foo?preserveMessageQos=true", "D", "JMSPriority", 4);
- template.sendBodyAndHeader("jms:queue:foo?preserveMessageQos=true", "E", "JMSPriority", 6);
- template.sendBodyAndHeader("jms:queue:foo?preserveMessageQos=true", "F", "JMSPriority", 4);
- template.sendBodyAndHeader("jms:queue:foo?preserveMessageQos=true", "G", "JMSPriority", 8);
- template.sendBodyAndHeader("jms:queue:foo?preserveMessageQos=true", "H", "JMSPriority", 6);
+ template.sendBodyAndHeader("jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true", "A", "JMSPriority", 6);
+ template.sendBodyAndHeader("jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true", "B", "JMSPriority", 6);
+ template.sendBodyAndHeader("jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true", "C", "JMSPriority", 4);
+ template.sendBodyAndHeader("jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true", "D", "JMSPriority", 4);
+ template.sendBodyAndHeader("jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true", "E", "JMSPriority", 6);
+ template.sendBodyAndHeader("jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true", "F", "JMSPriority", 4);
+ template.sendBodyAndHeader("jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true", "G", "JMSPriority", 8);
+ template.sendBodyAndHeader("jms:queue:JmsBatchResequencerJMSPriorityTest.foo?preserveMessageQos=true", "H", "JMSPriority", 6);
assertMockEndpointsSatisfied();
}
@@ -55,7 +55,7 @@ public void testBatchResequencerJMSPriority() throws Exception {
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
+ ConnectionFactory connectionFactory = CamelJmsTestHelper.getSharedConnectionFactory();
camelContext.addComponent("jms", jmsComponentAutoAcknowledge(connectionFactory));
return camelContext;
@@ -67,7 +67,7 @@ protected RouteBuilder createRouteBuilder() throws Exception {
@Override
public void configure() throws Exception {
// START SNIPPET: e1
- from("jms:queue:foo")
+ from("jms:queue:JmsBatchResequencerJMSPriorityTest.foo")
// sort by JMSPriority by allowing duplicates (message can have same JMSPriority)
// and use reverse ordering so 9 is first output (most important), and 0 is last
// use batch mode and fire every 3th second
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsComponentTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsComponentTest.java
index 40d74a424df78..fb97686fda8d0 100644
--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsComponentTest.java
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsComponentTest.java
@@ -34,7 +34,7 @@ public class JmsComponentTest extends CamelTestSupport {
@Test
public void testComponentOptions() throws Exception {
- String reply = template.requestBody("activemq123:queue:hello?requestTimeout=5000", "Hello World", String.class);
+ String reply = template.requestBody("activemq123:queue:JmsComponentTest.hello?requestTimeout=5000", "Hello World", String.class);
assertEquals("Bye World", reply);
assertEquals(true, endpoint.isAcceptMessagesWhileStopping());
@@ -60,7 +60,7 @@ public void testComponentOptions() throws Exception {
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
+ ConnectionFactory connectionFactory = CamelJmsTestHelper.getSharedConnectionFactory();
JmsComponent comp = jmsComponentAutoAcknowledge(connectionFactory);
comp.setAcceptMessagesWhileStopping(true);
@@ -84,7 +84,7 @@ protected CamelContext createCamelContext() throws Exception {
camelContext.addComponent(componentName, comp);
- endpoint = (JmsEndpoint) comp.createEndpoint("queue:hello");
+ endpoint = (JmsEndpoint) comp.createEndpoint("queue:JmsComponentTest.hello");
return camelContext;
}
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsConsumerRestartPickupConfigurationChangesTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsConsumerRestartPickupConfigurationChangesTest.java
index 8b5e8e1eb070e..f421e09088aa6 100644
--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsConsumerRestartPickupConfigurationChangesTest.java
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsConsumerRestartPickupConfigurationChangesTest.java
@@ -33,7 +33,7 @@ public class JmsConsumerRestartPickupConfigurationChangesTest extends CamelTestS
@Test
public void testRestartJmsConsumerPickupChanges() throws Exception {
- JmsEndpoint endpoint = context.getEndpoint("activemq:queue:foo", JmsEndpoint.class);
+ JmsEndpoint endpoint = context.getEndpoint("activemq:queue:JmsConsumerRestartPickupConfigurationChangesTest.foo", JmsEndpoint.class);
JmsConsumer consumer = endpoint.createConsumer(new Processor() {
public void process(Exchange exchange) throws Exception {
template.send("mock:result", exchange);
@@ -44,7 +44,7 @@ public void process(Exchange exchange) throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedBodiesReceived("Hello World");
- template.sendBody("activemq:queue:foo", "Hello World");
+ template.sendBody("activemq:queue:JmsConsumerRestartPickupConfigurationChangesTest.foo", "Hello World");
assertMockEndpointsSatisfied();
consumer.stop();
@@ -58,7 +58,7 @@ public void process(Exchange exchange) throws Exception {
result.reset();
result.expectedBodiesReceived("Bye World");
- template.sendBody("activemq:queue:bar", "Bye World");
+ template.sendBody("activemq:queue:JmsConsumerRestartPickupConfigurationChangesTest.bar", "Bye World");
assertMockEndpointsSatisfied();
consumer.stop();
@@ -67,7 +67,7 @@ public void process(Exchange exchange) throws Exception {
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
+ ConnectionFactory connectionFactory = CamelJmsTestHelper.getSharedConnectionFactory();
camelContext.addComponent("activemq", jmsComponentAutoAcknowledge(connectionFactory));
return camelContext;
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentTest.java
index ba919e5addbf6..fd9dab74a9bb2 100644
--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentTest.java
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentTest.java
@@ -64,7 +64,7 @@ public Object call() throws Exception {
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
+ ConnectionFactory connectionFactory = CamelJmsTestHelper.getSharedConnectionFactory();
camelContext.addComponent("jms", jmsComponentAutoAcknowledge(connectionFactory));
return camelContext;
@@ -75,9 +75,9 @@ protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
- from("direct:start").to("jms:queue:foo");
+ from("direct:start").to("jms:queue:foo-JmsProducerConcurrentTest");
- from("jms:queue:foo").to("mock:result");
+ from("jms:queue:foo-JmsProducerConcurrentTest").to("mock:result");
}
};
}
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentWithReplyTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentWithReplyTest.java
index d7bf2dc9b6f50..15fcead7bf1e8 100644
--- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentWithReplyTest.java
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsProduerConcurrentWithReplyTest.java
@@ -73,7 +73,7 @@ public Object call() throws Exception {
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
- ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
+ ConnectionFactory connectionFactory = CamelJmsTestHelper.getSharedConnectionFactory();
camelContext.addComponent("jms", jmsComponentAutoAcknowledge(connectionFactory));
return camelContext;
@@ -84,9 +84,9 @@ protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
- from("direct:start").to("jms:queue:foo");
+ from("direct:start").to("jms:queue:foo-JmsProduerConcurrentWithReplyTest");
- from("jms:queue:foo?concurrentConsumers=5").transform(simple("Bye ${in.body}")).to("mock:result");
+ from("jms:queue:foo-JmsProduerConcurrentWithReplyTest?concurrentConsumers=5").transform(simple("Bye ${in.body}")).to("mock:result");
}
};
}
|
fef434b11eb3abf88fca6ac3073a5025447a646d
|
orientdb
|
Fixed issue -1521 about JSON management of- embedded lists with different types--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java
index f4490b73182..7df87652f56 100755
--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java
@@ -212,13 +212,9 @@ else if (firstValue instanceof Enum<?>)
else {
linkedType = OType.getTypeByClass(firstValue.getClass());
- if (linkedType != OType.LINK) {
- // EMBEDDED FOR SURE SINCE IT CONTAINS JAVA TYPES
- if (linkedType == null) {
- linkedType = OType.EMBEDDED;
- // linkedClass = new OClass(firstValue.getClass());
- }
- }
+ if (linkedType != OType.LINK)
+ // EMBEDDED FOR SURE DON'T USE THE LINKED TYPE
+ linkedType = null;
}
if (type == null)
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/JSONTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/JSONTest.java
index 411fe57fa61..e702f9d9fc1 100755
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/JSONTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/JSONTest.java
@@ -15,7 +15,12 @@
*/
package com.orientechnologies.orient.test.database.auto;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Parameters;
@@ -37,6 +42,11 @@
public class JSONTest {
private String url;
+// public static final void main(String[] args) throws Exception {
+// JSONTest test = new JSONTest("memory:test");
+// test.testList();
+// }
+
@Parameters(value = "url")
public JSONTest(final String iURL) {
url = iURL;
@@ -687,4 +697,17 @@ public void nestedJsonTest() {
db.close();
}
+
+ @Test
+ public void testList() throws Exception {
+ ODocument documentSource = new ODocument();
+ documentSource.fromJSON("{\"list\" : [\"string\", 42]}");
+
+ ODocument documentTarget = new ODocument();
+ documentTarget.fromStream(documentSource.toStream());
+
+ OTrackedList<Object> list = documentTarget.field("list", OType.EMBEDDEDLIST);
+ Assert.assertEquals(list.get(0), "string");
+ Assert.assertEquals(list.get(1), 42);
+ }
}
|
6257b7824382d7ef3193828019e3e293a92417f4
|
elasticsearch
|
Query DSL: Add `and`, `or`, and `not` filters,- closes -216--
|
a
|
https://github.com/elastic/elasticsearch
|
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/AndFilterBuilder.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/AndFilterBuilder.java
new file mode 100644
index 0000000000000..6366202ba9001
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/AndFilterBuilder.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.query.xcontent;
+
+import org.elasticsearch.util.collect.Lists;
+import org.elasticsearch.util.xcontent.builder.XContentBuilder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+/**
+ * A filter that matches documents matching boolean combinations of other filters.
+ *
+ * @author kimchy (shay.banon)
+ */
+public class AndFilterBuilder extends BaseFilterBuilder {
+
+ private ArrayList<XContentFilterBuilder> filters = Lists.newArrayList();
+
+ private Boolean cache;
+
+ public AndFilterBuilder(XContentFilterBuilder... filters) {
+ for (XContentFilterBuilder filter : filters) {
+ this.filters.add(filter);
+ }
+ }
+
+ /**
+ * Adds a filter to the list of filters to "and".
+ */
+ public AndFilterBuilder add(XContentFilterBuilder filterBuilder) {
+ filters.add(filterBuilder);
+ return this;
+ }
+
+ /**
+ * Should the inner filters be cached or not. Defaults to <tt>true</tt>.
+ */
+ public AndFilterBuilder cache(boolean cache) {
+ this.cache = cache;
+ return this;
+ }
+
+ @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject(AndFilterParser.NAME);
+ builder.startArray("filters");
+ for (XContentFilterBuilder filter : filters) {
+ filter.toXContent(builder, params);
+ }
+ builder.endArray();
+ if (cache != null) {
+ builder.field("cache", cache);
+ }
+ builder.endObject();
+ }
+}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/AndFilterParser.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/AndFilterParser.java
new file mode 100644
index 0000000000000..5dbb074a0c4bb
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/AndFilterParser.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.query.xcontent;
+
+import org.apache.lucene.search.Filter;
+import org.elasticsearch.index.AbstractIndexComponent;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.index.settings.IndexSettings;
+import org.elasticsearch.util.inject.Inject;
+import org.elasticsearch.util.lucene.search.AndFilter;
+import org.elasticsearch.util.settings.Settings;
+import org.elasticsearch.util.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import static org.elasticsearch.util.collect.Lists.*;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class AndFilterParser extends AbstractIndexComponent implements XContentFilterParser {
+
+ public static final String NAME = "and";
+
+ @Inject public AndFilterParser(Index index, @IndexSettings Settings settings) {
+ super(index, settings);
+ }
+
+ @Override public String[] names() {
+ return new String[]{NAME};
+ }
+
+ @Override public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+ XContentParser parser = parseContext.parser();
+
+ ArrayList<Filter> filters = newArrayList();
+
+ boolean cache = true;
+
+ String currentFieldName = null;
+ XContentParser.Token token;
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token == XContentParser.Token.START_ARRAY) {
+ if ("filters".equals(currentFieldName)) {
+ while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+ filters.add(parseContext.parseInnerFilter());
+ }
+ }
+ } else if (token.isValue()) {
+ if ("cache".equals(currentFieldName)) {
+ cache = parser.booleanValue();
+ }
+ }
+ }
+
+ if (cache) {
+ for (int i = 0; i < filters.size(); i++) {
+ filters.set(i, parseContext.cacheFilterIfPossible(filters.get(i)));
+ }
+ }
+ // no need to cache this one
+ return new AndFilter(filters);
+ }
+}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/FilterBuilders.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/FilterBuilders.java
index a2bb7678516b8..5185c3afa3a04 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/FilterBuilders.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/FilterBuilders.java
@@ -179,6 +179,18 @@ public static BoolFilterBuilder boolFilter() {
return new BoolFilterBuilder();
}
+ public static AndFilterBuilder andFilter(XContentFilterBuilder... filters) {
+ return new AndFilterBuilder(filters);
+ }
+
+ public static OrFilterBuilder orFilter(XContentFilterBuilder... filters) {
+ return new OrFilterBuilder(filters);
+ }
+
+ public static NotFilterBuilder notFilter(XContentFilterBuilder filter) {
+ return new NotFilterBuilder(filter);
+ }
+
private FilterBuilders() {
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/NotFilterBuilder.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/NotFilterBuilder.java
new file mode 100644
index 0000000000000..76e0b002bd62d
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/NotFilterBuilder.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.query.xcontent;
+
+import org.elasticsearch.util.xcontent.builder.XContentBuilder;
+
+import java.io.IOException;
+
+/**
+ * A filter that matches documents matching boolean combinations of other filters.
+ *
+ * @author kimchy (shay.banon)
+ */
+public class NotFilterBuilder extends BaseFilterBuilder {
+
+ private XContentFilterBuilder filter;
+
+ private Boolean cache;
+
+ public NotFilterBuilder(XContentFilterBuilder filter) {
+ this.filter = filter;
+ }
+
+ /**
+ * Should the inner filter be cached or not. Defaults to <tt>true</tt>.
+ */
+ public NotFilterBuilder cache(boolean cache) {
+ this.cache = cache;
+ return this;
+ }
+
+ @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject(NotFilterParser.NAME);
+ builder.field("filter");
+ filter.toXContent(builder, params);
+ if (cache != null) {
+ builder.field("cache", cache);
+ }
+ builder.endObject();
+ }
+}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/NotFilterParser.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/NotFilterParser.java
new file mode 100644
index 0000000000000..41c3d9eb78188
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/NotFilterParser.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.query.xcontent;
+
+import org.apache.lucene.search.Filter;
+import org.elasticsearch.index.AbstractIndexComponent;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.index.settings.IndexSettings;
+import org.elasticsearch.util.inject.Inject;
+import org.elasticsearch.util.lucene.search.NotFilter;
+import org.elasticsearch.util.settings.Settings;
+import org.elasticsearch.util.xcontent.XContentParser;
+
+import java.io.IOException;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class NotFilterParser extends AbstractIndexComponent implements XContentFilterParser {
+
+ public static final String NAME = "not";
+
+ @Inject public NotFilterParser(Index index, @IndexSettings Settings settings) {
+ super(index, settings);
+ }
+
+ @Override public String[] names() {
+ return new String[]{NAME};
+ }
+
+ @Override public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+ XContentParser parser = parseContext.parser();
+
+ Filter filter = null;
+ boolean cache = true;
+
+ String currentFieldName = null;
+ XContentParser.Token token;
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token == XContentParser.Token.START_OBJECT) {
+ if ("filter".equals(currentFieldName)) {
+ filter = parseContext.parseInnerFilter();
+ }
+ } else if (token.isValue()) {
+ if ("cache".equals(currentFieldName)) {
+ cache = parser.booleanValue();
+ }
+ }
+ }
+
+ if (filter == null) {
+ throw new QueryParsingException(index, "filter is required when using `not` filter");
+ }
+
+ if (cache) {
+ filter = parseContext.cacheFilterIfPossible(filter);
+ }
+ // no need to cache this one
+ return new NotFilter(filter);
+ }
+}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/OrFilterBuilder.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/OrFilterBuilder.java
new file mode 100644
index 0000000000000..b11e175825d3a
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/OrFilterBuilder.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.query.xcontent;
+
+import org.elasticsearch.util.collect.Lists;
+import org.elasticsearch.util.xcontent.builder.XContentBuilder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+/**
+ * A filter that matches documents matching boolean combinations of other filters.
+ *
+ * @author kimchy (shay.banon)
+ */
+public class OrFilterBuilder extends BaseFilterBuilder {
+
+ private ArrayList<XContentFilterBuilder> filters = Lists.newArrayList();
+
+ private Boolean cache;
+
+ public OrFilterBuilder(XContentFilterBuilder... filters) {
+ for (XContentFilterBuilder filter : filters) {
+ this.filters.add(filter);
+ }
+ }
+
+ /**
+ * Adds a filter to the list of filters to "or".
+ */
+ public OrFilterBuilder add(XContentFilterBuilder filterBuilder) {
+ filters.add(filterBuilder);
+ return this;
+ }
+
+ /**
+ * Should the inner filters be cached or not. Defaults to <tt>true</tt>.
+ */
+ public OrFilterBuilder cache(boolean cache) {
+ this.cache = cache;
+ return this;
+ }
+
+ @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject(OrFilterParser.NAME);
+ builder.startArray("filters");
+ for (XContentFilterBuilder filter : filters) {
+ filter.toXContent(builder, params);
+ }
+ builder.endArray();
+ if (cache != null) {
+ builder.field("cache", cache);
+ }
+ builder.endObject();
+ }
+}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/OrFilterParser.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/OrFilterParser.java
new file mode 100644
index 0000000000000..83f2e28bc4274
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/OrFilterParser.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.query.xcontent;
+
+import org.apache.lucene.search.Filter;
+import org.elasticsearch.index.AbstractIndexComponent;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.index.settings.IndexSettings;
+import org.elasticsearch.util.inject.Inject;
+import org.elasticsearch.util.lucene.search.OrFilter;
+import org.elasticsearch.util.settings.Settings;
+import org.elasticsearch.util.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import static org.elasticsearch.util.collect.Lists.*;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class OrFilterParser extends AbstractIndexComponent implements XContentFilterParser {
+
+ public static final String NAME = "or";
+
+ @Inject public OrFilterParser(Index index, @IndexSettings Settings settings) {
+ super(index, settings);
+ }
+
+ @Override public String[] names() {
+ return new String[]{NAME};
+ }
+
+ @Override public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+ XContentParser parser = parseContext.parser();
+
+ ArrayList<Filter> filters = newArrayList();
+
+ boolean cache = true;
+
+ String currentFieldName = null;
+ XContentParser.Token token;
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token == XContentParser.Token.START_ARRAY) {
+ if ("filters".equals(currentFieldName)) {
+ while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+ filters.add(parseContext.parseInnerFilter());
+ }
+ }
+ } else if (token.isValue()) {
+ if ("cache".equals(currentFieldName)) {
+ cache = parser.booleanValue();
+ }
+ }
+ }
+
+ if (cache) {
+ for (int i = 0; i < filters.size(); i++) {
+ filters.set(i, parseContext.cacheFilterIfPossible(filters.get(i)));
+ }
+ }
+ // no need to cache this one
+ return new OrFilter(filters);
+ }
+}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/XContentQueryParserRegistry.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/XContentQueryParserRegistry.java
index 0f9e093735b2a..f06cf6b1fc5de 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/XContentQueryParserRegistry.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/query/xcontent/XContentQueryParserRegistry.java
@@ -86,6 +86,9 @@ public XContentQueryParserRegistry(Index index,
add(filterParsersMap, new PrefixFilterParser(index, indexSettings));
add(filterParsersMap, new QueryFilterParser(index, indexSettings));
add(filterParsersMap, new BoolFilterParser(index, indexSettings));
+ add(filterParsersMap, new AndFilterParser(index, indexSettings));
+ add(filterParsersMap, new OrFilterParser(index, indexSettings));
+ add(filterParsersMap, new NotFilterParser(index, indexSettings));
if (filterParsers != null) {
for (XContentFilterParser filterParser : filterParsers) {
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/facets/query/QueryFacetCollector.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/facets/query/QueryFacetCollector.java
index e7797aeb8f30f..1768a98777213 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/facets/query/QueryFacetCollector.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/facets/query/QueryFacetCollector.java
@@ -20,7 +20,6 @@
package org.elasticsearch.search.facets.query;
import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
@@ -49,12 +48,7 @@ public QueryFacetCollector(String facetName, Query query, FilterCache filterCach
}
@Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
- DocIdSet docIdSet = filter.getDocIdSet(reader);
- if (docIdSet instanceof DocSet) {
- docSet = (DocSet) docIdSet;
- } else {
- docSet = DocSets.cacheable(reader, docIdSet);
- }
+ docSet = DocSets.convert(reader, filter.getDocIdSet(reader));
}
@Override public void collect(int doc) throws IOException {
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/AndDocSet.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/AndDocSet.java
new file mode 100644
index 0000000000000..46a576fed02eb
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/AndDocSet.java
@@ -0,0 +1,139 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.util.lucene.docset;
+
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class AndDocSet extends DocSet {
+
+ private final List<DocSet> sets;
+
+ public AndDocSet(List<DocSet> sets) {
+ this.sets = sets;
+ }
+
+ @Override public boolean get(int doc) throws IOException {
+ for (DocSet s : sets) {
+ if (!s.get(doc)) return false;
+ }
+ return true;
+ }
+
+ @Override public boolean isCacheable() {
+ for (DocSet set : sets) {
+ if (!set.isCacheable()) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override public DocIdSetIterator iterator() throws IOException {
+ return new AndDocIdSetIterator();
+ }
+
+ class AndDocIdSetIterator extends DocIdSetIterator {
+ int lastReturn = -1;
+ private DocIdSetIterator[] iterators = null;
+
+ AndDocIdSetIterator() throws IOException {
+ iterators = new DocIdSetIterator[sets.size()];
+ int j = 0;
+ for (DocIdSet set : sets) {
+ if (set != null) {
+ DocIdSetIterator dcit = set.iterator();
+ iterators[j++] = dcit;
+ }
+ }
+ lastReturn = (iterators.length > 0 ? -1 : DocIdSetIterator.NO_MORE_DOCS);
+ }
+
+ @Override
+ public final int docID() {
+ return lastReturn;
+ }
+
+ @Override
+ public final int nextDoc() throws IOException {
+
+ if (lastReturn == DocIdSetIterator.NO_MORE_DOCS) return DocIdSetIterator.NO_MORE_DOCS;
+
+ DocIdSetIterator dcit = iterators[0];
+ int target = dcit.nextDoc();
+ int size = iterators.length;
+ int skip = 0;
+ int i = 1;
+ while (i < size) {
+ if (i != skip) {
+ dcit = iterators[i];
+ int docid = dcit.advance(target);
+ if (docid > target) {
+ target = docid;
+ if (i != 0) {
+ skip = i;
+ i = 0;
+ continue;
+ } else
+ skip = 0;
+ }
+ }
+ i++;
+ }
+ return (lastReturn = target);
+ }
+
+ @Override
+ public final int advance(int target) throws IOException {
+
+ if (lastReturn == DocIdSetIterator.NO_MORE_DOCS) return DocIdSetIterator.NO_MORE_DOCS;
+
+ DocIdSetIterator dcit = iterators[0];
+ target = dcit.advance(target);
+ int size = iterators.length;
+ int skip = 0;
+ int i = 1;
+ while (i < size) {
+ if (i != skip) {
+ dcit = iterators[i];
+ int docid = dcit.advance(target);
+ if (docid > target) {
+ target = docid;
+ if (i != 0) {
+ skip = i;
+ i = 0;
+ continue;
+ } else {
+ skip = 0;
+ }
+ }
+ }
+ i++;
+ }
+ return (lastReturn = target);
+ }
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/DocSets.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/DocSets.java
index 9eb3445f71b41..4049b6ae82556 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/DocSets.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/DocSets.java
@@ -31,6 +31,22 @@
*/
public class DocSets {
+ public static DocSet convert(IndexReader reader, DocIdSet docIdSet) throws IOException {
+ if (docIdSet == null) {
+ return DocSet.EMPTY_DOC_SET;
+ } else if (docIdSet instanceof DocSet) {
+ return (DocSet) docIdSet;
+ } else if (docIdSet instanceof OpenBitSet) {
+ return new OpenBitDocSet((OpenBitSet) docIdSet);
+ } else {
+ final DocIdSetIterator it = docIdSet.iterator();
+ // null is allowed to be returned by iterator(),
+ // in this case we wrap with the empty set,
+ // which is cacheable.
+ return (it == null) ? DocSet.EMPTY_DOC_SET : new OpenBitDocSet(it, reader.maxDoc());
+ }
+ }
+
/**
* Returns a cacheable version of the doc id set (might be the same instance provided as a parameter).
*/
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/NotDocSet.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/NotDocSet.java
new file mode 100644
index 0000000000000..535bd0c4675ab
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/NotDocSet.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.util.lucene.docset;
+
+import org.apache.lucene.search.DocIdSetIterator;
+
+import java.io.IOException;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class NotDocSet extends DocSet {
+
+ private final DocSet set;
+
+ private final int max;
+
+ public NotDocSet(DocSet set, int max) {
+ this.set = set;
+ this.max = max;
+ }
+
+ @Override public boolean isCacheable() {
+ return set.isCacheable();
+ }
+
+ @Override public boolean get(int doc) throws IOException {
+ return !set.get(doc);
+ }
+
+ @Override public DocIdSetIterator iterator() throws IOException {
+ return new NotDocIdSetIterator();
+ }
+
+ class NotDocIdSetIterator extends DocIdSetIterator {
+ int lastReturn = -1;
+ private DocIdSetIterator it1 = null;
+ private int innerDocid = -1;
+
+ NotDocIdSetIterator() throws IOException {
+ initialize();
+ }
+
+ private void initialize() throws IOException {
+ it1 = set.iterator();
+
+ try {
+ if ((innerDocid = it1.nextDoc()) == DocIdSetIterator.NO_MORE_DOCS) it1 = null;
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Override
+ public int docID() {
+ return lastReturn;
+ }
+
+ @Override
+ public int nextDoc() throws IOException {
+ return advance(0);
+ }
+
+ @Override
+ public int advance(int target) throws IOException {
+
+ if (lastReturn == DocIdSetIterator.NO_MORE_DOCS) {
+ return DocIdSetIterator.NO_MORE_DOCS;
+ }
+
+ if (target <= lastReturn) target = lastReturn + 1;
+
+ if (it1 != null && innerDocid < target) {
+ if ((innerDocid = it1.advance(target)) == DocIdSetIterator.NO_MORE_DOCS) {
+ it1 = null;
+ }
+ }
+
+ while (it1 != null && innerDocid == target) {
+ target++;
+ if (target >= max) {
+ return (lastReturn = DocIdSetIterator.NO_MORE_DOCS);
+ }
+ if ((innerDocid = it1.advance(target)) == DocIdSetIterator.NO_MORE_DOCS) {
+ it1 = null;
+ }
+ }
+ return (lastReturn = target);
+ }
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/OrDocSet.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/OrDocSet.java
new file mode 100644
index 0000000000000..7a179b48f314f
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/docset/OrDocSet.java
@@ -0,0 +1,199 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.util.lucene.docset;
+
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class OrDocSet extends DocSet {
+
+ private final List<DocSet> sets;
+
+ public OrDocSet(List<DocSet> sets) {
+ this.sets = sets;
+ }
+
+ @Override public boolean get(int doc) throws IOException {
+ for (DocSet s : sets) {
+ if (s.get(doc)) return true;
+ }
+ return false;
+ }
+
+ @Override public boolean isCacheable() {
+ for (DocSet set : sets) {
+ if (!set.isCacheable()) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override public DocIdSetIterator iterator() throws IOException {
+ return new OrDocIdSetIterator();
+ }
+
+ public class OrDocIdSetIterator extends DocIdSetIterator {
+
+ private final class Item {
+ public final DocIdSetIterator iter;
+ public int doc;
+
+ public Item(DocIdSetIterator iter) {
+ this.iter = iter;
+ this.doc = -1;
+ }
+ }
+
+ private int _curDoc;
+ private final Item[] _heap;
+ private int _size;
+
+ OrDocIdSetIterator() throws IOException {
+ _curDoc = -1;
+ _heap = new Item[sets.size()];
+ _size = 0;
+ for (DocIdSet set : sets) {
+ _heap[_size++] = new Item(set.iterator());
+ }
+ if (_size == 0) _curDoc = DocIdSetIterator.NO_MORE_DOCS;
+ }
+
+ @Override
+ public final int docID() {
+ return _curDoc;
+ }
+
+ @Override
+ public final int nextDoc() throws IOException {
+ if (_curDoc == DocIdSetIterator.NO_MORE_DOCS) return DocIdSetIterator.NO_MORE_DOCS;
+
+ Item top = _heap[0];
+ while (true) {
+ DocIdSetIterator topIter = top.iter;
+ int docid;
+ if ((docid = topIter.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
+ top.doc = docid;
+ heapAdjust();
+ } else {
+ heapRemoveRoot();
+ if (_size == 0) return (_curDoc = DocIdSetIterator.NO_MORE_DOCS);
+ }
+ top = _heap[0];
+ int topDoc = top.doc;
+ if (topDoc > _curDoc) {
+ return (_curDoc = topDoc);
+ }
+ }
+ }
+
+ @Override
+ public final int advance(int target) throws IOException {
+ if (_curDoc == DocIdSetIterator.NO_MORE_DOCS) return DocIdSetIterator.NO_MORE_DOCS;
+
+ if (target <= _curDoc) target = _curDoc + 1;
+
+ Item top = _heap[0];
+ while (true) {
+ DocIdSetIterator topIter = top.iter;
+ int docid;
+ if ((docid = topIter.advance(target)) != DocIdSetIterator.NO_MORE_DOCS) {
+ top.doc = docid;
+ heapAdjust();
+ } else {
+ heapRemoveRoot();
+ if (_size == 0) return (_curDoc = DocIdSetIterator.NO_MORE_DOCS);
+ }
+ top = _heap[0];
+ int topDoc = top.doc;
+ if (topDoc >= target) {
+ return (_curDoc = topDoc);
+ }
+ }
+ }
+
+// Organize subScorers into a min heap with scorers generating the earlest document on top.
+ /*
+ private final void heapify() {
+ int size = _size;
+ for (int i=(size>>1)-1; i>=0; i--)
+ heapAdjust(i);
+ }
+ */
+ /* The subtree of subScorers at root is a min heap except possibly for its root element.
+ * Bubble the root down as required to make the subtree a heap.
+ */
+
+ private final void heapAdjust() {
+ final Item[] heap = _heap;
+ final Item top = heap[0];
+ final int doc = top.doc;
+ final int size = _size;
+ int i = 0;
+
+ while (true) {
+ int lchild = (i << 1) + 1;
+ if (lchild >= size) break;
+
+ Item left = heap[lchild];
+ int ldoc = left.doc;
+
+ int rchild = lchild + 1;
+ if (rchild < size) {
+ Item right = heap[rchild];
+ int rdoc = right.doc;
+
+ if (rdoc <= ldoc) {
+ if (doc <= rdoc) break;
+
+ heap[i] = right;
+ i = rchild;
+ continue;
+ }
+ }
+
+ if (doc <= ldoc) break;
+
+ heap[i] = left;
+ i = lchild;
+ }
+ heap[i] = top;
+ }
+
+ // Remove the root Scorer from subScorers and re-establish it as a heap
+
+ private void heapRemoveRoot() {
+ _size--;
+ if (_size > 0) {
+ Item tmp = _heap[0];
+ _heap[0] = _heap[_size];
+ _heap[_size] = tmp; // keep the finished iterator at the end for debugging
+ heapAdjust();
+ }
+ }
+
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/search/AndFilter.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/search/AndFilter.java
new file mode 100644
index 0000000000000..66f4862ebc998
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/search/AndFilter.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.util.lucene.search;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.Filter;
+import org.elasticsearch.util.collect.Lists;
+import org.elasticsearch.util.lucene.docset.AndDocSet;
+import org.elasticsearch.util.lucene.docset.DocSet;
+import org.elasticsearch.util.lucene.docset.DocSets;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class AndFilter extends Filter {
+
+ private final List<? extends Filter> filters;
+
+ public AndFilter(List<? extends Filter> filters) {
+ this.filters = filters;
+ }
+
+ public List<? extends Filter> filters() {
+ return filters;
+ }
+
+ @Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ if (filters.size() == 1) {
+ return DocSets.convert(reader, filters.get(0).getDocIdSet(reader));
+ }
+ List<DocSet> sets = Lists.newArrayListWithExpectedSize(filters.size());
+ for (Filter filter : filters) {
+ sets.add(DocSets.convert(reader, filter.getDocIdSet(reader)));
+ }
+ return new AndDocSet(sets);
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/search/NotFilter.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/search/NotFilter.java
new file mode 100644
index 0000000000000..3a16dffd67bd8
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/search/NotFilter.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.util.lucene.search;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.Filter;
+import org.elasticsearch.util.lucene.docset.DocSets;
+import org.elasticsearch.util.lucene.docset.NotDocSet;
+
+import java.io.IOException;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class NotFilter extends Filter {
+
+ private final Filter filter;
+
+ public NotFilter(Filter filter) {
+ this.filter = filter;
+ }
+
+ public Filter filter() {
+ return filter;
+ }
+
+ @Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ return new NotDocSet(DocSets.convert(reader, filter.getDocIdSet(reader)), reader.maxDoc());
+ }
+}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/search/OrFilter.java b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/search/OrFilter.java
new file mode 100644
index 0000000000000..420d659cdfced
--- /dev/null
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/util/lucene/search/OrFilter.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to Elastic Search and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Elastic Search licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.util.lucene.search;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.Filter;
+import org.elasticsearch.util.collect.Lists;
+import org.elasticsearch.util.lucene.docset.DocSet;
+import org.elasticsearch.util.lucene.docset.DocSets;
+import org.elasticsearch.util.lucene.docset.OrDocSet;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * @author kimchy (shay.banon)
+ */
+public class OrFilter extends Filter {
+
+ private final List<? extends Filter> filters;
+
+ public OrFilter(List<? extends Filter> filters) {
+ this.filters = filters;
+ }
+
+ public List<? extends Filter> filters() {
+ return filters;
+ }
+
+ @Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
+ if (filters.size() == 1) {
+ return DocSets.convert(reader, filters.get(0).getDocIdSet(reader));
+ }
+ List<DocSet> sets = Lists.newArrayListWithExpectedSize(filters.size());
+ for (Filter filter : filters) {
+ sets.add(DocSets.convert(reader, filter.getDocIdSet(reader)));
+ }
+ return new OrDocSet(sets);
+ }
+}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/SimpleIndexQueryParserTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/SimpleIndexQueryParserTests.java
index ba29717f451ea..88c212eeca758 100644
--- a/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/SimpleIndexQueryParserTests.java
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/SimpleIndexQueryParserTests.java
@@ -30,10 +30,7 @@
import org.elasticsearch.index.engine.robin.RobinIndexEngine;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParser;
-import org.elasticsearch.util.lucene.search.CustomBoostFactorQuery;
-import org.elasticsearch.util.lucene.search.MoreLikeThisQuery;
-import org.elasticsearch.util.lucene.search.Queries;
-import org.elasticsearch.util.lucene.search.TermFilter;
+import org.elasticsearch.util.lucene.search.*;
import org.testng.annotations.Test;
import java.io.IOException;
@@ -491,6 +488,77 @@ public class SimpleIndexQueryParserTests {
// TODO get the content and test
}
+ @Test public void testAndFilteredQueryBuilder() throws IOException {
+ IndexQueryParser queryParser = newQueryParser();
+ Query parsedQuery = queryParser.parse(filtered(matchAllQuery(), andFilter(termFilter("name.first", "shay1"), termFilter("name.first", "shay4"))));
+ assertThat(parsedQuery, instanceOf(FilteredQuery.class));
+ FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
+
+ AndFilter andFilter = (AndFilter) filteredQuery.getFilter();
+ assertThat(andFilter.filters().size(), equalTo(2));
+ assertThat(((TermFilter) andFilter.filters().get(0)).getTerm(), equalTo(new Term("name.first", "shay1")));
+ assertThat(((TermFilter) andFilter.filters().get(1)).getTerm(), equalTo(new Term("name.first", "shay4")));
+ }
+
+ @Test public void testAndFilteredQuery() throws IOException {
+ IndexQueryParser queryParser = newQueryParser();
+ String query = copyToStringFromClasspath("/org/elasticsearch/index/query/xcontent/and-filter.json");
+ Query parsedQuery = queryParser.parse(query);
+ assertThat(parsedQuery, instanceOf(FilteredQuery.class));
+ FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
+
+ AndFilter andFilter = (AndFilter) filteredQuery.getFilter();
+ assertThat(andFilter.filters().size(), equalTo(2));
+ assertThat(((TermFilter) andFilter.filters().get(0)).getTerm(), equalTo(new Term("name.first", "shay1")));
+ assertThat(((TermFilter) andFilter.filters().get(1)).getTerm(), equalTo(new Term("name.first", "shay4")));
+ }
+
+ @Test public void testOrFilteredQueryBuilder() throws IOException {
+ IndexQueryParser queryParser = newQueryParser();
+ Query parsedQuery = queryParser.parse(filtered(matchAllQuery(), orFilter(termFilter("name.first", "shay1"), termFilter("name.first", "shay4"))));
+ assertThat(parsedQuery, instanceOf(FilteredQuery.class));
+ FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
+
+ OrFilter andFilter = (OrFilter) filteredQuery.getFilter();
+ assertThat(andFilter.filters().size(), equalTo(2));
+ assertThat(((TermFilter) andFilter.filters().get(0)).getTerm(), equalTo(new Term("name.first", "shay1")));
+ assertThat(((TermFilter) andFilter.filters().get(1)).getTerm(), equalTo(new Term("name.first", "shay4")));
+ }
+
+ @Test public void testOrFilteredQuery() throws IOException {
+ IndexQueryParser queryParser = newQueryParser();
+ String query = copyToStringFromClasspath("/org/elasticsearch/index/query/xcontent/or-filter.json");
+ Query parsedQuery = queryParser.parse(query);
+ assertThat(parsedQuery, instanceOf(FilteredQuery.class));
+ FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
+
+ OrFilter orFilter = (OrFilter) filteredQuery.getFilter();
+ assertThat(orFilter.filters().size(), equalTo(2));
+ assertThat(((TermFilter) orFilter.filters().get(0)).getTerm(), equalTo(new Term("name.first", "shay1")));
+ assertThat(((TermFilter) orFilter.filters().get(1)).getTerm(), equalTo(new Term("name.first", "shay4")));
+ }
+
+ @Test public void testNotFilteredQueryBuilder() throws IOException {
+ IndexQueryParser queryParser = newQueryParser();
+ Query parsedQuery = queryParser.parse(filtered(matchAllQuery(), notFilter(termFilter("name.first", "shay1"))));
+ assertThat(parsedQuery, instanceOf(FilteredQuery.class));
+ FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
+
+ NotFilter notFilter = (NotFilter) filteredQuery.getFilter();
+ assertThat(((TermFilter) notFilter.filter()).getTerm(), equalTo(new Term("name.first", "shay1")));
+ }
+
+ @Test public void testNotFilteredQuery() throws IOException {
+ IndexQueryParser queryParser = newQueryParser();
+ String query = copyToStringFromClasspath("/org/elasticsearch/index/query/xcontent/not-filter.json");
+ Query parsedQuery = queryParser.parse(query);
+ assertThat(parsedQuery, instanceOf(FilteredQuery.class));
+ FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
+
+ NotFilter notFilter = (NotFilter) filteredQuery.getFilter();
+ assertThat(((TermFilter) notFilter.filter()).getTerm(), equalTo(new Term("name.first", "shay1")));
+ }
+
@Test public void testBoolQueryBuilder() throws IOException {
IndexQueryParser queryParser = newQueryParser();
Query parsedQuery = queryParser.parse(boolQuery().must(termQuery("content", "test1")).must(termQuery("content", "test4")).mustNot(termQuery("content", "test2")).should(termQuery("content", "test3")));
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/and-filter.json b/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/and-filter.json
new file mode 100644
index 0000000000000..7a01692063e52
--- /dev/null
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/and-filter.json
@@ -0,0 +1,19 @@
+{
+ "filtered" : {
+ "query" : {
+ "term" : { "name.first" : "shay" }
+ },
+ "filter" : {
+ "and" : {
+ "filters" : [
+ {
+ "term" : { "name.first" : "shay1" }
+ },
+ {
+ "term" : { "name.first" : "shay4" }
+ }
+ ]
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/not-filter.json b/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/not-filter.json
new file mode 100644
index 0000000000000..fee4ef92c3e57
--- /dev/null
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/not-filter.json
@@ -0,0 +1,14 @@
+{
+ "filtered" : {
+ "query" : {
+ "term" : { "name.first" : "shay" }
+ },
+ "filter" : {
+ "not" : {
+ "filter" : {
+ "term" : { "name.first" : "shay1" }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/or-filter.json b/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/or-filter.json
new file mode 100644
index 0000000000000..7bcf560691f1c
--- /dev/null
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/index/query/xcontent/or-filter.json
@@ -0,0 +1,19 @@
+{
+ "filtered" : {
+ "query" : {
+ "term" : { "name.first" : "shay" }
+ },
+ "filter" : {
+ "or" : {
+ "filters" : [
+ {
+ "term" : { "name.first" : "shay1" }
+ },
+ {
+ "term" : { "name.first" : "shay4" }
+ }
+ ]
+ }
+ }
+ }
+}
\ No newline at end of file
|
b590cc49acba625d3d076a9722f2ff87a3400be4
|
hbase
|
HBASE-2124 Useless exception in HMaster on- startup--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@899441 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index ddc9da33c16f..5fc3789a5104 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -163,6 +163,7 @@ Release 0.21.0 - Unreleased
Purtell)
HBASE-2122 [stargate] Initializing scanner column families doesn't work
(Greg Lu via Andrew Purtell)
+ HBASE-2124 Useless exception in HMaster on startup
IMPROVEMENTS
HBASE-1760 Cleanup TODOs in HTable
diff --git a/src/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWrapper.java b/src/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWrapper.java
index cccd9d38ee8f..57de37bdae54 100644
--- a/src/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWrapper.java
+++ b/src/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWrapper.java
@@ -374,6 +374,7 @@ private HServerAddress readAddress(String znode, Watcher watcher) {
try {
return readAddressOrThrow(znode, watcher);
} catch (IOException e) {
+ LOG.debug("Failed to read " + e.getMessage());
return null;
}
}
|
ce2995c49d111b5749a88b4de2065a3a68551386
|
hbase
|
HBASE-1136 HashFunction inadvertently destroys- some randomness; REVERTING--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@735880 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 7e9c80101764..970250b4eaeb 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -3,8 +3,6 @@ Release 0.20.0 - Unreleased
INCOMPATIBLE CHANGES
BUG FIXES
- HBASE-1136 HashFunction inadvertently destroys some randomness
- (Jonathan Ellis via Stack)
HBASE-1140 "ant clean test" fails (Nitay Joffe via Stack)
IMPROVEMENTS
diff --git a/src/java/org/onelab/filter/HashFunction.java b/src/java/org/onelab/filter/HashFunction.java
index cf97c7bcaa26..a0c26964e2f6 100644
--- a/src/java/org/onelab/filter/HashFunction.java
+++ b/src/java/org/onelab/filter/HashFunction.java
@@ -118,8 +118,7 @@ public int[] hash(Key k){
}
int[] result = new int[nbHash];
for (int i = 0, initval = 0; i < nbHash; i++) {
- initval = hashFunction.hash(b, initval);
- result[i] = Math.abs(initval) % maxValue;
+ initval = result[i] = Math.abs(hashFunction.hash(b, initval) % maxValue);
}
return result;
}//end hash()
diff --git a/src/test/org/onelab/test/TestFilter.java b/src/test/org/onelab/test/TestFilter.java
index 363fc9451481..6c88c1ab33f4 100644
--- a/src/test/org/onelab/test/TestFilter.java
+++ b/src/test/org/onelab/test/TestFilter.java
@@ -274,7 +274,7 @@ public void testCountingBloomFilter() throws UnsupportedEncodingException {
bf.add(k2);
bf.add(k3);
assertTrue(bf.membershipTest(key));
- assertFalse(bf.membershipTest(k2));
+ assertTrue(bf.membershipTest(new StringKey("graknyl")));
assertFalse(bf.membershipTest(new StringKey("xyzzy")));
assertFalse(bf.membershipTest(new StringKey("abcd")));
@@ -287,7 +287,7 @@ public void testCountingBloomFilter() throws UnsupportedEncodingException {
bf2.add(key);
bf.or(bf2);
assertTrue(bf.membershipTest(key));
- assertTrue(bf.membershipTest(k2));
+ assertTrue(bf.membershipTest(new StringKey("graknyl")));
assertFalse(bf.membershipTest(new StringKey("xyzzy")));
assertFalse(bf.membershipTest(new StringKey("abcd")));
|
01de4037a2111d2ded5cbf72a444e3e4088d95da
|
intellij-community
|
nullable problems: disable warnings on- "complex" getters/setters (IDEA-63958)--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/java/java-impl/src/com/intellij/codeInspection/nullable/NullableStuffInspection.java b/java/java-impl/src/com/intellij/codeInspection/nullable/NullableStuffInspection.java
index 499a3452e92d6..2dec04e471098 100644
--- a/java/java-impl/src/com/intellij/codeInspection/nullable/NullableStuffInspection.java
+++ b/java/java-impl/src/com/intellij/codeInspection/nullable/NullableStuffInspection.java
@@ -34,6 +34,7 @@
import com.intellij.psi.search.searches.OverridingMethodsSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.*;
+import com.intellij.refactoring.psi.PropertyUtils;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Processor;
import com.intellij.util.Query;
@@ -114,14 +115,16 @@ public void visitMethodCallExpression(PsiMethodCallExpression expression) {
ProblemHighlightType.GENERIC_ERROR_OR_WARNING, new AnnotateMethodFix(anno, ArrayUtil.toStringArray(annoToRemove)));
}
}
- if (annotated.isDeclaredNotNull && manager.isNullable(getter, false)) {
- holder.registerProblem(nameIdentifier, InspectionsBundle.message(
- "inspection.nullable.problems.annotated.field.getter.conflict", StringUtil.getShortName(anno), nullableSimpleName),
- ProblemHighlightType.GENERIC_ERROR_OR_WARNING, new AnnotateMethodFix(anno, ArrayUtil.toStringArray(annoToRemove)));
- } else if (annotated.isDeclaredNullable && manager.isNotNull(getter, false)) {
- holder.registerProblem(nameIdentifier, InspectionsBundle.message(
- "inspection.nullable.problems.annotated.field.getter.conflict", StringUtil.getShortName(anno), notNullSimpleName),
- ProblemHighlightType.GENERIC_ERROR_OR_WARNING, new AnnotateMethodFix(anno, ArrayUtil.toStringArray(annoToRemove)));
+ if (PropertyUtils.isSimpleGetter(getter)) {
+ if (annotated.isDeclaredNotNull && manager.isNullable(getter, false)) {
+ holder.registerProblem(nameIdentifier, InspectionsBundle.message(
+ "inspection.nullable.problems.annotated.field.getter.conflict", StringUtil.getShortName(anno), nullableSimpleName),
+ ProblemHighlightType.GENERIC_ERROR_OR_WARNING, new AnnotateMethodFix(anno, ArrayUtil.toStringArray(annoToRemove)));
+ } else if (annotated.isDeclaredNullable && manager.isNotNull(getter, false)) {
+ holder.registerProblem(nameIdentifier, InspectionsBundle.message(
+ "inspection.nullable.problems.annotated.field.getter.conflict", StringUtil.getShortName(anno), notNullSimpleName),
+ ProblemHighlightType.GENERIC_ERROR_OR_WARNING, new AnnotateMethodFix(anno, ArrayUtil.toStringArray(annoToRemove)));
+ }
}
}
@@ -141,22 +144,24 @@ public void visitMethodCallExpression(PsiMethodCallExpression expression) {
ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
new AddAnnotationFix(anno, parameter, ArrayUtil.toStringArray(annoToRemove)));
}
- if (annotated.isDeclaredNotNull && manager.isNullable(parameter, false)) {
- final PsiIdentifier nameIdentifier1 = parameter.getNameIdentifier();
- assert nameIdentifier1 != null : parameter;
- holder.registerProblem(nameIdentifier1, InspectionsBundle.message(
- "inspection.nullable.problems.annotated.field.setter.parameter.conflict",
- StringUtil.getShortName(anno), nullableSimpleName),
- ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
- new AddAnnotationFix(anno, parameter, ArrayUtil.toStringArray(annoToRemove)));
- }
- else if (annotated.isDeclaredNullable && manager.isNotNull(parameter, false)) {
- final PsiIdentifier nameIdentifier1 = parameter.getNameIdentifier();
- assert nameIdentifier1 != null : parameter;
- holder.registerProblem(nameIdentifier1, InspectionsBundle.message(
- "inspection.nullable.problems.annotated.field.setter.parameter.conflict", StringUtil.getShortName(anno), notNullSimpleName),
- ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
- new AddAnnotationFix(anno, parameter, ArrayUtil.toStringArray(annoToRemove)));
+ if (PropertyUtils.isSimpleSetter(setter)) {
+ if (annotated.isDeclaredNotNull && manager.isNullable(parameter, false)) {
+ final PsiIdentifier nameIdentifier1 = parameter.getNameIdentifier();
+ assert nameIdentifier1 != null : parameter;
+ holder.registerProblem(nameIdentifier1, InspectionsBundle.message(
+ "inspection.nullable.problems.annotated.field.setter.parameter.conflict",
+ StringUtil.getShortName(anno), nullableSimpleName),
+ ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
+ new AddAnnotationFix(anno, parameter, ArrayUtil.toStringArray(annoToRemove)));
+ }
+ else if (annotated.isDeclaredNullable && manager.isNotNull(parameter, false)) {
+ final PsiIdentifier nameIdentifier1 = parameter.getNameIdentifier();
+ assert nameIdentifier1 != null : parameter;
+ holder.registerProblem(nameIdentifier1, InspectionsBundle.message(
+ "inspection.nullable.problems.annotated.field.setter.parameter.conflict", StringUtil.getShortName(anno), notNullSimpleName),
+ ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
+ new AddAnnotationFix(anno, parameter, ArrayUtil.toStringArray(annoToRemove)));
+ }
}
if (containingClass == null) {
return;
diff --git a/java/java-tests/testData/inspection/nullableProblems/getterSetterProblems/src/Test.java b/java/java-tests/testData/inspection/nullableProblems/getterSetterProblems/src/Test.java
index a908c2c615cac..c4034e91c761a 100644
--- a/java/java-tests/testData/inspection/nullableProblems/getterSetterProblems/src/Test.java
+++ b/java/java-tests/testData/inspection/nullableProblems/getterSetterProblems/src/Test.java
@@ -38,4 +38,13 @@ public C getC() {
public void setC(@Nullable C c) {
this.c = c;
}
+
+ @NotNull C c1;
+ @Nullable
+ public C getC1() {
+ if (c1 != null) {
+ return null;
+ }
+ return c1;
+ }
}
\ No newline at end of file
|
9214f95cf4e5d19f5d226245043ea0669d276e59
|
hbase
|
HBASE-6170 Timeouts for row lock and scan should- be separate (Chris Trezzo)--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1354325 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hbase
|
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 9dac3884c0be..82cf976a1b38 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -536,16 +536,25 @@ public static enum Modify {
public static String HBASE_CLIENT_INSTANCE_ID = "hbase.client.instance.id";
/**
- * HRegion server lease period in milliseconds. Clients must report in within this period
- * else they are considered dead. Unit measured in ms (milliseconds).
+ * The row lock timeout period in milliseconds.
*/
- public static String HBASE_REGIONSERVER_LEASE_PERIOD_KEY =
- "hbase.regionserver.lease.period";
+ public static String HBASE_REGIONSERVER_ROWLOCK_TIMEOUT_PERIOD =
+ "hbase.regionserver.rowlock.timeout.period";
/**
- * Default value of {@link #HBASE_REGIONSERVER_LEASE_PERIOD_KEY}.
+ * Default value of {@link #HBASE_REGIONSERVER_ROWLOCK_TIMEOUT_PERIOD}.
*/
- public static long DEFAULT_HBASE_REGIONSERVER_LEASE_PERIOD = 60000;
+ public static int DEFAULT_HBASE_REGIONSERVER_ROWLOCK_TIMEOUT_PERIOD = 60000;
+
+ /**
+ * The client scanner timeout period in milliseconds.
+ */
+ public static String HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD = "hbase.client.scanner.timeout.period";
+
+ /**
+ * Default value of {@link #HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD}.
+ */
+ public static int DEFAULT_HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD = 60000;
/**
* timeout for each RPC
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
index 32ddf120c62b..ccb1dc3f79b5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
@@ -106,9 +106,8 @@ public ClientScanner(final Configuration conf, final Scan scan,
HConstants.HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE_KEY,
HConstants.DEFAULT_HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE);
}
- this.scannerTimeout = (int) conf.getLong(
- HConstants.HBASE_REGIONSERVER_LEASE_PERIOD_KEY,
- HConstants.DEFAULT_HBASE_REGIONSERVER_LEASE_PERIOD);
+ this.scannerTimeout = conf.getInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD,
+ HConstants.DEFAULT_HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
// check if application wants to collect scan metrics
byte[] enableMetrics = scan.getAttribute(
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 3556a7c05586..bad1d12df793 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -422,6 +422,16 @@ public class HRegionServer implements ClientProtocol,
*/
private MovedRegionsCleaner movedRegionsCleaner;
+ /**
+ * The lease timeout period for row locks (milliseconds).
+ */
+ private final int rowLockLeaseTimeoutPeriod;
+
+ /**
+ * The lease timeout period for client scanners (milliseconds).
+ */
+ private final int scannerLeaseTimeoutPeriod;
+
/**
* Starts a HRegionServer at the default location
@@ -466,6 +476,13 @@ public HRegionServer(Configuration conf)
this.abortRequested = false;
this.stopped = false;
+ this.rowLockLeaseTimeoutPeriod = conf.getInt(
+ HConstants.HBASE_REGIONSERVER_ROWLOCK_TIMEOUT_PERIOD,
+ HConstants.DEFAULT_HBASE_REGIONSERVER_ROWLOCK_TIMEOUT_PERIOD);
+
+ this.scannerLeaseTimeoutPeriod = conf.getInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD,
+ HConstants.DEFAULT_HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
+
// Server to handle client requests.
String hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
conf.get("hbase.regionserver.dns.interface", "default"),
@@ -705,10 +722,7 @@ private void initializeThreads() throws IOException {
this.compactionChecker = new CompactionChecker(this,
this.threadWakeFrequency * multiplier, this);
- this.leases = new Leases((int) conf.getLong(
- HConstants.HBASE_REGIONSERVER_LEASE_PERIOD_KEY,
- HConstants.DEFAULT_HBASE_REGIONSERVER_LEASE_PERIOD),
- this.threadWakeFrequency);
+ this.leases = new Leases(this.threadWakeFrequency);
// Create the thread for the ThriftServer.
if (conf.getBoolean("hbase.regionserver.export.thrift", false)) {
@@ -2658,7 +2672,8 @@ protected long addRowLock(Integer r, HRegion region)
long lockId = nextLong();
String lockName = String.valueOf(lockId);
rowlocks.put(lockName, r);
- this.leases.createLease(lockName, new RowLockListener(lockName, region));
+ this.leases.createLease(lockName, this.rowLockLeaseTimeoutPeriod, new RowLockListener(lockName,
+ region));
return lockId;
}
@@ -2666,7 +2681,8 @@ protected long addScanner(RegionScanner s) throws LeaseStillHeldException {
long scannerId = nextLong();
String scannerName = String.valueOf(scannerId);
scanners.put(scannerName, s);
- this.leases.createLease(scannerName, new ScannerListener(scannerName));
+ this.leases.createLease(scannerName, this.scannerLeaseTimeoutPeriod, new ScannerListener(
+ scannerName));
return scannerId;
}
@@ -2925,7 +2941,7 @@ public ScanResponse scan(final RpcController controller,
}
scannerId = addScanner(scanner);
scannerName = String.valueOf(scannerId);
- ttl = leases.leasePeriod;
+ ttl = this.scannerLeaseTimeoutPeriod;
}
if (rows > 0) {
@@ -2999,7 +3015,7 @@ public ScanResponse scan(final RpcController controller,
// Adding resets expiration time on lease.
if (scanners.containsKey(scannerName)) {
if (lease != null) leases.addLease(lease);
- ttl = leases.leasePeriod;
+ ttl = this.scannerLeaseTimeoutPeriod;
}
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java
index 0b7ed0e3861d..f2bd5680487d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java
@@ -55,7 +55,6 @@
@InterfaceAudience.Private
public class Leases extends HasThread {
private static final Log LOG = LogFactory.getLog(Leases.class.getName());
- protected final int leasePeriod;
private final int leaseCheckFrequency;
private volatile DelayQueue<Lease> leaseQueue = new DelayQueue<Lease>();
protected final Map<String, Lease> leases = new HashMap<String, Lease>();
@@ -63,13 +62,11 @@ public class Leases extends HasThread {
/**
* Creates a lease monitor
- *
- * @param leasePeriod - length of time (milliseconds) that the lease is valid
+ *
* @param leaseCheckFrequency - how often the lease should be checked
- * (milliseconds)
+ * (milliseconds)
*/
- public Leases(final int leasePeriod, final int leaseCheckFrequency) {
- this.leasePeriod = leasePeriod;
+ public Leases(final int leaseCheckFrequency) {
this.leaseCheckFrequency = leaseCheckFrequency;
setDaemon(true);
}
@@ -135,15 +132,16 @@ public void close() {
}
/**
- * Obtain a lease
- *
+ * Obtain a lease.
+ *
* @param leaseName name of the lease
+ * @param leaseTimeoutPeriod length of the lease in milliseconds
* @param listener listener that will process lease expirations
* @throws LeaseStillHeldException
*/
- public void createLease(String leaseName, final LeaseListener listener)
- throws LeaseStillHeldException {
- addLease(new Lease(leaseName, listener));
+ public void createLease(String leaseName, int leaseTimeoutPeriod, final LeaseListener listener)
+ throws LeaseStillHeldException {
+ addLease(new Lease(leaseName, leaseTimeoutPeriod, listener));
}
/**
@@ -155,7 +153,7 @@ public void addLease(final Lease lease) throws LeaseStillHeldException {
if (this.stopRequested) {
return;
}
- lease.setExpirationTime(System.currentTimeMillis() + this.leasePeriod);
+ lease.resetExpirationTime();
synchronized (leaseQueue) {
if (leases.containsKey(lease.getLeaseName())) {
throw new LeaseStillHeldException(lease.getLeaseName());
@@ -202,7 +200,7 @@ public void renewLease(final String leaseName) throws LeaseException {
throw new LeaseException("lease '" + leaseName +
"' does not exist or has already expired");
}
- lease.setExpirationTime(System.currentTimeMillis() + leasePeriod);
+ lease.resetExpirationTime();
leaseQueue.add(lease);
}
}
@@ -241,16 +239,14 @@ Lease removeLease(final String leaseName) throws LeaseException {
static class Lease implements Delayed {
private final String leaseName;
private final LeaseListener listener;
+ private int leaseTimeoutPeriod;
private long expirationTime;
- Lease(final String leaseName, LeaseListener listener) {
- this(leaseName, listener, 0);
- }
-
- Lease(final String leaseName, LeaseListener listener, long expirationTime) {
+ Lease(final String leaseName, int leaseTimeoutPeriod, LeaseListener listener) {
this.leaseName = leaseName;
this.listener = listener;
- this.expirationTime = expirationTime;
+ this.leaseTimeoutPeriod = leaseTimeoutPeriod;
+ this.expirationTime = 0;
}
/** @return the lease name */
@@ -294,9 +290,11 @@ public int compareTo(Delayed o) {
return this.equals(o) ? 0 : (delta > 0 ? 1 : -1);
}
- /** @param expirationTime the expirationTime to set */
- public void setExpirationTime(long expirationTime) {
- this.expirationTime = expirationTime;
+ /**
+ * Resets the expiration time of the lease.
+ */
+ public void resetExpirationTime() {
+ this.expirationTime = System.currentTimeMillis() + this.leaseTimeoutPeriod;
}
}
-}
\ No newline at end of file
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
index 1f9358324973..362c094ba9e5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
@@ -25,7 +25,9 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.catalog.MetaReader;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.util.Bytes;
@@ -48,7 +50,7 @@ public class TestScannerTimeout {
private final static byte[] SOME_BYTES = Bytes.toBytes("f");
private final static byte[] TABLE_NAME = Bytes.toBytes("t");
private final static int NB_ROWS = 10;
- // Be careful w/ what you set this timer too... it can get in the way of
+ // Be careful w/ what you set this timer to... it can get in the way of
// the mini cluster coming up -- the verification in particular.
private final static int SCANNER_TIMEOUT = 10000;
private final static int SCANNER_CACHING = 5;
@@ -59,7 +61,7 @@ public class TestScannerTimeout {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
Configuration c = TEST_UTIL.getConfiguration();
- c.setInt("hbase.regionserver.lease.period", SCANNER_TIMEOUT);
+ c.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, SCANNER_TIMEOUT);
// We need more than one region server for this test
TEST_UTIL.startMiniCluster(2);
HTable table = TEST_UTIL.createTable(TABLE_NAME, SOME_BYTES);
@@ -134,8 +136,7 @@ public void test2772() throws Exception {
// Since the RS is already created, this conf is client-side only for
// this new table
Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
- conf.setInt(
- HConstants.HBASE_REGIONSERVER_LEASE_PERIOD_KEY, SCANNER_TIMEOUT*100);
+ conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, SCANNER_TIMEOUT * 100);
HTable higherScanTimeoutTable = new HTable(conf, TABLE_NAME);
ResultScanner r = higherScanTimeoutTable.getScanner(scan);
// This takes way less than SCANNER_TIMEOUT*100
@@ -201,8 +202,7 @@ public void test3686b() throws Exception {
// Since the RS is already created, this conf is client-side only for
// this new table
Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
- conf.setInt(
- HConstants.HBASE_REGIONSERVER_LEASE_PERIOD_KEY, SCANNER_TIMEOUT*100);
+ conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, SCANNER_TIMEOUT * 100);
HTable higherScanTimeoutTable = new HTable(conf, TABLE_NAME);
ResultScanner r = higherScanTimeoutTable.getScanner(scan);
int count = 1;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
index 7951b8a4dd22..25a0c0547483 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
@@ -325,8 +325,9 @@ Configuration initSplit() {
// Make lease timeout longer, lease checks less frequent
TEST_UTIL.getConfiguration().setInt(
"hbase.master.lease.thread.wakefrequency", 5 * 1000);
- TEST_UTIL.getConfiguration().setInt(
- "hbase.regionserver.lease.period", 10 * 1000);
+ TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 10 * 1000);
+ TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_REGIONSERVER_ROWLOCK_TIMEOUT_PERIOD,
+ 10 * 1000);
// Increase the amount of time between client retries
TEST_UTIL.getConfiguration().setLong("hbase.client.pause", 15 * 1000);
// This size should make it so we always split using the addContent
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 08949facf2a5..fbd17ba89540 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -3797,7 +3797,8 @@ private Configuration initSplit() {
// Make lease timeout longer, lease checks less frequent
conf.setInt("hbase.master.lease.thread.wakefrequency", 5 * 1000);
- conf.setInt(HConstants.HBASE_REGIONSERVER_LEASE_PERIOD_KEY, 10 * 1000);
+ conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 10 * 1000);
+ conf.setInt(HConstants.HBASE_REGIONSERVER_ROWLOCK_TIMEOUT_PERIOD, 10 * 1000);
// Increase the amount of time between client retries
conf.setLong("hbase.client.pause", 15 * 1000);
|
78930691ca6b664a07a6995c60bb8238cf7edf96
|
hbase
|
HBASE-9999 Add support for small reverse scan--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1573949 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/hbase
|
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
index 0290dcac922a..574d937b46d8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
@@ -39,7 +39,6 @@
import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos;
import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ExceptionUtil;
/**
* Implements the scanner interface for the HBase client.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java
index a980ec968f41..dd20f0a4984b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java
@@ -153,21 +153,23 @@ private boolean nextScanner(int nbRows, final boolean done,
LOG.trace("Advancing internal small scanner to startKey at '"
+ Bytes.toStringBinary(localStartKey) + "'");
}
- smallScanCallable = getSmallScanCallable(localStartKey, cacheNum);
+ smallScanCallable = getSmallScanCallable(
+ scan, getConnection(), getTable(), localStartKey, cacheNum);
if (this.scanMetrics != null && skipRowOfFirstResult == null) {
this.scanMetrics.countOfRegions.incrementAndGet();
}
return true;
}
- private RegionServerCallable<Result[]> getSmallScanCallable(
+ static RegionServerCallable<Result[]> getSmallScanCallable(
+ final Scan sc, HConnection connection, TableName table,
byte[] localStartKey, final int cacheNum) {
- this.scan.setStartRow(localStartKey);
+ sc.setStartRow(localStartKey);
RegionServerCallable<Result[]> callable = new RegionServerCallable<Result[]>(
- getConnection(), getTable(), scan.getStartRow()) {
+ connection, table, sc.getStartRow()) {
public Result[] call(int callTimeout) throws IOException {
ScanRequest request = RequestConverter.buildScanRequest(getLocation()
- .getRegionInfo().getRegionName(), scan, cacheNum, true);
+ .getRegionInfo().getRegionName(), sc, cacheNum, true);
PayloadCarryingRpcController controller = new PayloadCarryingRpcController();
controller.setPriority(getTableName());
controller.setCallTimeout(callTimeout);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index f3d443670191..624389225cac 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -742,15 +742,24 @@ public ResultScanner getScanner(final Scan scan) throws IOException {
if (scan.getCaching() <= 0) {
scan.setCaching(getScannerCaching());
}
- if (scan.isSmall() && !scan.isReversed()) {
+
+ if (scan.isReversed()) {
+ if (scan.isSmall()) {
+ return new ClientSmallReversedScanner(getConfiguration(), scan, getName(),
+ this.connection);
+ } else {
+ return new ReversedClientScanner(getConfiguration(), scan, getName(),
+ this.connection);
+ }
+ }
+
+ if (scan.isSmall()) {
return new ClientSmallScanner(getConfiguration(), scan, getName(),
- this.connection);
- } else if (scan.isReversed()) {
- return new ReversedClientScanner(getConfiguration(), scan, getName(),
- this.connection);
+ this.connection, this.rpcCallerFactory);
+ } else {
+ return new ClientScanner(getConfiguration(), scan,
+ getName(), this.connection);
}
- return new ClientScanner(getConfiguration(), scan,
- getName(), this.connection);
}
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedClientScanner.java
index 470ffa132fc6..d6e17ae8f552 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedClientScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedClientScanner.java
@@ -29,6 +29,7 @@
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ExceptionUtil;
/**
* A reversed client scanner which support backward scanning
@@ -114,6 +115,7 @@ protected boolean nextScanner(int nbRows, final boolean done)
this.scanMetrics.countOfRegions.incrementAndGet();
}
} catch (IOException e) {
+ ExceptionUtil.rethrowIfInterrupt(e);
close();
throw e;
}
@@ -151,7 +153,7 @@ protected boolean checkScanStopRow(final byte[] startKey) {
* @param row
* @return a new byte array which is the closest front row of the specified one
*/
- private byte[] createClosestRowBefore(byte[] row) {
+ protected byte[] createClosestRowBefore(byte[] row) {
if (row == null) {
throw new IllegalArgumentException("The passed row is empty");
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedScannerCallable.java
index 487777fc8f3d..a974b01c6f69 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedScannerCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ReversedScannerCallable.java
@@ -70,7 +70,7 @@ public void prepare(boolean reload) throws IOException {
this.location = connection.getRegionLocation(tableName, row, reload);
if (this.location == null) {
throw new IOException("Failed to find location, tableName="
- + tableName + ", row=" + Bytes.toString(row) + ", reload="
+ + tableName + ", row=" + Bytes.toStringBinary(row) + ", reload="
+ reload);
}
} else {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 33a18fb2c407..1520a65a1743 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -6061,4 +6061,170 @@ public void testReversedScanUnderMultiRegions() throws Exception {
assertEquals(insertNum, count);
table.close();
}
+
+
+ /**
+ * Tests reversed scan under multi regions
+ */
+ @Test
+ public void testSmallReversedScanUnderMultiRegions() throws Exception {
+ // Test Initialization.
+ byte[] TABLE = Bytes.toBytes("testSmallReversedScanUnderMultiRegions");
+ byte[][] splitRows = new byte[][]{
+ Bytes.toBytes("000"), Bytes.toBytes("002"), Bytes.toBytes("004"),
+ Bytes.toBytes("006"), Bytes.toBytes("008"), Bytes.toBytes("010")};
+ HTable table = TEST_UTIL.createTable(TABLE, FAMILY, splitRows);
+ TEST_UTIL.waitUntilAllRegionsAssigned(table.getName());
+
+ assertEquals(splitRows.length + 1, table.getRegionLocations().size());
+ for (byte[] splitRow : splitRows) {
+ Put put = new Put(splitRow);
+ put.add(FAMILY, QUALIFIER, VALUE);
+ table.put(put);
+
+ byte[] nextRow = Bytes.copy(splitRow);
+ nextRow[nextRow.length - 1]++;
+
+ put = new Put(nextRow);
+ put.add(FAMILY, QUALIFIER, VALUE);
+ table.put(put);
+ }
+
+ // scan forward
+ ResultScanner scanner = table.getScanner(new Scan());
+ int count = 0;
+ for (Result r : scanner) {
+ assertTrue(!r.isEmpty());
+ count++;
+ }
+ assertEquals(12, count);
+
+ reverseScanTest(table, false);
+ reverseScanTest(table, true);
+
+ table.close();
+ }
+
+ private void reverseScanTest(HTable table, boolean small) throws IOException {
+ // scan backward
+ Scan scan = new Scan();
+ scan.setReversed(true);
+ ResultScanner scanner = table.getScanner(scan);
+ int count = 0;
+ byte[] lastRow = null;
+ for (Result r : scanner) {
+ assertTrue(!r.isEmpty());
+ count++;
+ byte[] thisRow = r.getRow();
+ if (lastRow != null) {
+ assertTrue("Error scan order, last row= " + Bytes.toString(lastRow)
+ + ",this row=" + Bytes.toString(thisRow),
+ Bytes.compareTo(thisRow, lastRow) < 0);
+ }
+ lastRow = thisRow;
+ }
+ assertEquals(12, count);
+
+ scan = new Scan();
+ scan.setSmall(small);
+ scan.setReversed(true);
+ scan.setStartRow(Bytes.toBytes("002"));
+ scanner = table.getScanner(scan);
+ count = 0;
+ lastRow = null;
+ for (Result r : scanner) {
+ assertTrue(!r.isEmpty());
+ count++;
+ byte[] thisRow = r.getRow();
+ if (lastRow != null) {
+ assertTrue("Error scan order, last row= " + Bytes.toString(lastRow)
+ + ",this row=" + Bytes.toString(thisRow),
+ Bytes.compareTo(thisRow, lastRow) < 0);
+ }
+ lastRow = thisRow;
+ }
+ assertEquals(3, count); // 000 001 002
+
+ scan = new Scan();
+ scan.setSmall(small);
+ scan.setReversed(true);
+ scan.setStartRow(Bytes.toBytes("002"));
+ scan.setStopRow(Bytes.toBytes("000"));
+ scanner = table.getScanner(scan);
+ count = 0;
+ lastRow = null;
+ for (Result r : scanner) {
+ assertTrue(!r.isEmpty());
+ count++;
+ byte[] thisRow = r.getRow();
+ if (lastRow != null) {
+ assertTrue("Error scan order, last row= " + Bytes.toString(lastRow)
+ + ",this row=" + Bytes.toString(thisRow),
+ Bytes.compareTo(thisRow, lastRow) < 0);
+ }
+ lastRow = thisRow;
+ }
+ assertEquals(2, count); // 001 002
+
+ scan = new Scan();
+ scan.setSmall(small);
+ scan.setReversed(true);
+ scan.setStartRow(Bytes.toBytes("001"));
+ scanner = table.getScanner(scan);
+ count = 0;
+ lastRow = null;
+ for (Result r : scanner) {
+ assertTrue(!r.isEmpty());
+ count++;
+ byte[] thisRow = r.getRow();
+ if (lastRow != null) {
+ assertTrue("Error scan order, last row= " + Bytes.toString(lastRow)
+ + ",this row=" + Bytes.toString(thisRow),
+ Bytes.compareTo(thisRow, lastRow) < 0);
+ }
+ lastRow = thisRow;
+ }
+ assertEquals(2, count); // 000 001
+
+ scan = new Scan();
+ scan.setSmall(small);
+ scan.setReversed(true);
+ scan.setStartRow(Bytes.toBytes("000"));
+ scanner = table.getScanner(scan);
+ count = 0;
+ lastRow = null;
+ for (Result r : scanner) {
+ assertTrue(!r.isEmpty());
+ count++;
+ byte[] thisRow = r.getRow();
+ if (lastRow != null) {
+ assertTrue("Error scan order, last row= " + Bytes.toString(lastRow)
+ + ",this row=" + Bytes.toString(thisRow),
+ Bytes.compareTo(thisRow, lastRow) < 0);
+ }
+ lastRow = thisRow;
+ }
+ assertEquals(1, count); // 000
+
+ scan = new Scan();
+ scan.setSmall(small);
+ scan.setReversed(true);
+ scan.setStartRow(Bytes.toBytes("006"));
+ scan.setStopRow(Bytes.toBytes("002"));
+ scanner = table.getScanner(scan);
+ count = 0;
+ lastRow = null;
+ for (Result r : scanner) {
+ assertTrue(!r.isEmpty());
+ count++;
+ byte[] thisRow = r.getRow();
+ if (lastRow != null) {
+ assertTrue("Error scan order, last row= " + Bytes.toString(lastRow)
+ + ",this row=" + Bytes.toString(thisRow),
+ Bytes.compareTo(thisRow, lastRow) < 0);
+ }
+ lastRow = thisRow;
+ }
+ assertEquals(4, count); // 003 004 005 006
+ }
}
|
d056c0b062311cc1e90ad134076fb13bcfe3ccf4
|
drools
|
JBRULES-2835: use new Environment constant--
|
p
|
https://github.com/kiegroup/drools
|
diff --git a/drools-persistence-jpa/src/main/java/org/drools/persistence/jpa/processinstance/JPAWorkItemManager.java b/drools-persistence-jpa/src/main/java/org/drools/persistence/jpa/processinstance/JPAWorkItemManager.java
index 6ec1bd8a503..4bfc2c1f0ee 100644
--- a/drools-persistence-jpa/src/main/java/org/drools/persistence/jpa/processinstance/JPAWorkItemManager.java
+++ b/drools-persistence-jpa/src/main/java/org/drools/persistence/jpa/processinstance/JPAWorkItemManager.java
@@ -93,7 +93,7 @@ public void internalAddWorkItem(WorkItem workItem) {
public void completeWorkItem(long id, Map<String, Object> results) {
Environment env = this.kruntime.getEnvironment();
// EntityManager em = (EntityManager) env.get(EnvironmentName.CMD_SCOPED_ENTITY_MANAGER);
- PersistenceContext context = ((PersistenceContextManager) env.get( EnvironmentName.ENTITY_MANAGER_FACTORY )).getCommandScopedPersistenceContext();
+ PersistenceContext context = ((PersistenceContextManager) env.get( EnvironmentName.PERSISTENCE_CONTEXT_MANAGER )).getCommandScopedPersistenceContext();
WorkItemInfo workItemInfo = null;
|
aa30bba068584f0a343d986de20c7d8a62037b2e
|
intellij-community
|
method return fix should not touch super method- formal parameters--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/java/java-impl/src/com/intellij/codeInsight/daemon/impl/quickfix/MethodReturnFix.java b/java/java-impl/src/com/intellij/codeInsight/daemon/impl/quickfix/MethodReturnFix.java
index a29204cd117f6..9ac74c5efa5ae 100644
--- a/java/java-impl/src/com/intellij/codeInsight/daemon/impl/quickfix/MethodReturnFix.java
+++ b/java/java-impl/src/com/intellij/codeInsight/daemon/impl/quickfix/MethodReturnFix.java
@@ -21,16 +21,20 @@
import com.intellij.ide.util.SuperMethodWarningUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.undo.UndoUtil;
+import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
-import com.intellij.openapi.editor.Editor;
-import com.intellij.psi.PsiFile;
-import com.intellij.psi.PsiMethod;
-import com.intellij.psi.PsiType;
+import com.intellij.psi.*;
+import com.intellij.psi.search.LocalSearchScope;
+import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.changeSignature.ChangeSignatureProcessor;
+import com.intellij.refactoring.typeMigration.TypeMigrationLabeler;
+import com.intellij.refactoring.typeMigration.TypeMigrationProcessor;
+import com.intellij.refactoring.typeMigration.TypeMigrationRules;
import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
public class MethodReturnFix extends IntentionAndQuickFixAction {
private final PsiMethod myMethod;
@@ -74,6 +78,8 @@ public void applyFix(final Project project, final PsiFile file, final Editor edi
if (superMethod != null) {
final PsiType superReturnType = superMethod.getReturnType();
if (superReturnType != null && !Comparing.equal(myReturnType, superReturnType)) {
+ final PsiClass psiClass = PsiUtil.resolveClassInType(superReturnType);
+ if (psiClass instanceof PsiTypeParameter && changeClassTypeArgument(project, (PsiTypeParameter)psiClass)) return;
method = SuperMethodWarningUtil.checkSuperMethod(myMethod, RefactoringBundle.message("to.refactor"));
if (method == null) return;
}
@@ -86,15 +92,67 @@ public void applyFix(final Project project, final PsiFile file, final Editor edi
method.getName(),
myReturnType,
RemoveUnusedParameterFix.getNewParametersInfo(method, null));
- if (ApplicationManager.getApplication().isUnitTestMode()) {
- processor.run();
+ processor.run();
+ if (method.getContainingFile() != file) {
+ UndoUtil.markPsiFileForUndo(file);
}
- else {
+ }
+
+ private boolean changeClassTypeArgument(Project project, PsiTypeParameter typeParameter) {
+ final PsiTypeParameterListOwner owner = typeParameter.getOwner();
+ if (owner instanceof PsiClass) {
+ final PsiClass derivedClass = myMethod.getContainingClass();
+ if (derivedClass == null) return true;
+ PsiType returnType = myReturnType;
+ if (returnType instanceof PsiPrimitiveType) {
+ returnType = ((PsiPrimitiveType)returnType).getBoxedType(derivedClass);
+ }
+ final PsiSubstitutor superClassSubstitutor =
+ TypeConversionUtil.getSuperClassSubstitutor((PsiClass)owner, derivedClass, PsiSubstitutor.EMPTY);
+ final PsiSubstitutor substitutor = superClassSubstitutor.put(typeParameter, returnType);
+ final TypeMigrationRules rules = new TypeMigrationRules(TypeMigrationLabeler.getElementType(derivedClass));
+ rules.setMigrationRootType(JavaPsiFacade.getElementFactory(project).createType(((PsiClass)owner), substitutor));
+ rules.setBoundScope(new LocalSearchScope(derivedClass));
+
+ final PsiReferenceParameterList referenceParameterList = findTypeArgumentsList(owner, derivedClass);
+ if (referenceParameterList == null) return true;
+ final TypeMigrationProcessor processor = new TypeMigrationProcessor(project, referenceParameterList, rules);
+ processor.setPreviewUsages(!ApplicationManager.getApplication().isUnitTestMode());
processor.run();
+ return true;
}
- if (method.getContainingFile() != file) {
- UndoUtil.markPsiFileForUndo(file);
+ return false;
+ }
+
+ @Nullable
+ private static PsiReferenceParameterList findTypeArgumentsList(final PsiTypeParameterListOwner owner, final PsiClass derivedClass) {
+ PsiReferenceParameterList referenceParameterList = null;
+ if (derivedClass instanceof PsiAnonymousClass) {
+ referenceParameterList = ((PsiAnonymousClass)derivedClass).getBaseClassReference().getParameterList();
+ } else {
+ final PsiReferenceList implementsList = derivedClass.getImplementsList();
+ if (implementsList != null) {
+ referenceParameterList = extractReferenceParameterList(owner, implementsList);
+ }
+ if (referenceParameterList == null) {
+ final PsiReferenceList extendsList = derivedClass.getExtendsList();
+ if (extendsList != null) {
+ referenceParameterList = extractReferenceParameterList(owner, extendsList);
+ }
+ }
+ }
+ return referenceParameterList;
+ }
+
+ @Nullable
+ private static PsiReferenceParameterList extractReferenceParameterList(final PsiTypeParameterListOwner owner,
+ final PsiReferenceList extendsList) {
+ for (PsiJavaCodeReferenceElement referenceElement : extendsList.getReferenceElements()) {
+ if (referenceElement.resolve() == owner) {
+ return referenceElement.getParameterList();
+ }
}
+ return null;
}
}
diff --git a/java/java-impl/src/com/intellij/refactoring/typeMigration/ClassTypeArgumentMigrationProcessor.java b/java/java-impl/src/com/intellij/refactoring/typeMigration/ClassTypeArgumentMigrationProcessor.java
index 999a9d6f3bb1c..9f78f9d47e37e 100644
--- a/java/java-impl/src/com/intellij/refactoring/typeMigration/ClassTypeArgumentMigrationProcessor.java
+++ b/java/java-impl/src/com/intellij/refactoring/typeMigration/ClassTypeArgumentMigrationProcessor.java
@@ -8,12 +8,11 @@
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
-import com.intellij.psi.search.searches.OverridingMethodsSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
-import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.InheritanceUtil;
+import com.intellij.psi.util.MethodSignatureUtil;
+import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.typeMigration.usageInfo.TypeMigrationUsageInfo;
-import com.intellij.util.Query;
import java.util.*;
@@ -135,15 +134,15 @@ private static Map<PsiClass, PsiTypeParameter[]> getTypeParametersHierarchy(fina
*/
private void prepareMethodsChangeSignature(final PsiClass currentClass, final PsiElement memberToChangeSignature, final PsiType memberType) {
if (memberToChangeSignature instanceof PsiMethod) {
- final Query<PsiMethod> methodQuery = OverridingMethodsSearch.search(((PsiMethod)memberToChangeSignature), currentClass.getUseScope(), true);
- for (PsiMethod method : methodQuery) {
- myLabeler.addRoot(new TypeMigrationUsageInfo(method), memberType, method, false);
+ final PsiMethod method = MethodSignatureUtil.findMethodBySuperMethod(currentClass, (PsiMethod)memberToChangeSignature, true);
+ if (method.getContainingClass() == currentClass) {
+ myLabeler.addRoot(new TypeMigrationUsageInfo(method), memberType, method, false);
}
} else if (memberToChangeSignature instanceof PsiParameter && ((PsiParameter)memberToChangeSignature).getDeclarationScope() instanceof PsiMethod) {
final PsiMethod superMethod = (PsiMethod)((PsiParameter)memberToChangeSignature).getDeclarationScope();
final int parameterIndex = superMethod.getParameterList().getParameterIndex((PsiParameter)memberToChangeSignature);
- final Query<PsiMethod> methodQuery = OverridingMethodsSearch.search(superMethod, currentClass.getUseScope(), true);
- for (PsiMethod method : methodQuery) {
+ final PsiMethod method = MethodSignatureUtil.findMethodBySuperMethod(currentClass, superMethod, true);
+ if (method.getContainingClass() == currentClass) {
final PsiParameter parameter = method.getParameterList().getParameters()[parameterIndex];
myLabeler.addRoot(new TypeMigrationUsageInfo(parameter), memberType, parameter, false);
}
diff --git a/java/java-impl/src/com/intellij/refactoring/typeMigration/TypeMigrationLabeler.java b/java/java-impl/src/com/intellij/refactoring/typeMigration/TypeMigrationLabeler.java
index c1a88667de4b5..b1f85817e46b5 100644
--- a/java/java-impl/src/com/intellij/refactoring/typeMigration/TypeMigrationLabeler.java
+++ b/java/java-impl/src/com/intellij/refactoring/typeMigration/TypeMigrationLabeler.java
@@ -540,7 +540,7 @@ void setConversionMapping(final PsiExpression expression, final Object obj) {
}
public PsiReference[] markRootUsages(final PsiElement element, final PsiType migrationType) {
- return markRootUsages(element, migrationType, ReferencesSearch.search(element, element.getUseScope(), false).toArray(new PsiReference[0]));
+ return markRootUsages(element, migrationType, ReferencesSearch.search(element, myRules.getSearchScope(), false).toArray(new PsiReference[0]));
}
PsiReference[] markRootUsages(final PsiElement element, final PsiType migrationType, final PsiReference[] refs) {
|
1e31fa5f695e2f3f99fec2488197e99985f36376
|
camel
|
CAMEL-1461: JMSProducer only sets JMSReplyTo if- exchange is out capable. Fixed a NPE.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@757693 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/EndpointMessageListener.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/EndpointMessageListener.java
index 3ad34041fb779..8fe911ecd594b 100644
--- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/EndpointMessageListener.java
+++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/EndpointMessageListener.java
@@ -88,8 +88,8 @@ public void onMessage(final Message message) {
cause = exchange.getException();
sendReply = true;
} else {
- // only throw exception if endpoint is not configured to transfer exceptions
- // back to caller
+ // only throw exception if endpoint is not configured to transfer exceptions back to caller
+ // do not send a reply but wrap and rethrow the exception
rce = wrapRuntimeCamelException(exchange.getException());
}
} else if (exchange.getFault().getBody() != null) {
@@ -97,14 +97,14 @@ public void onMessage(final Message message) {
body = exchange.getFault();
sendReply = true;
}
- } else {
+ } else if (exchange.getOut(false) != null) {
// process OK so get the reply
body = exchange.getOut(false);
sendReply = true;
}
// send the reply if we got a response and the exchange is out capable
- if (sendReply && !disableReplyTo && exchange.getPattern().isOutCapable()) {
+ if (rce == null && sendReply && !disableReplyTo && exchange.getPattern().isOutCapable()) {
sendReply(replyDestination, message, exchange, body, cause);
}
diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java
index 7736755481505..64dca42bf983f 100644
--- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java
+++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java
@@ -47,6 +47,7 @@
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.ExchangeHelper;
+import org.apache.camel.util.ObjectHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -215,6 +216,7 @@ public Message makeJmsMessage(Exchange exchange, org.apache.camel.Message camelM
// create jms message containg the caused exception
answer = createJmsMessage(cause, session);
} else {
+ ObjectHelper.notNull(camelMessage, "message body");
// create regular jms message using the camel message body
answer = createJmsMessage(exchange, camelMessage.getBody(), camelMessage.getHeaders(), session, exchange.getContext());
appendJmsProperties(answer, exchange, camelMessage);
@@ -249,7 +251,14 @@ public void appendJmsProperty(Message jmsMessage, Exchange exchange, org.apache.
if (headerName.equals("JMSCorrelationID")) {
jmsMessage.setJMSCorrelationID(ExchangeHelper.convertToType(exchange, String.class, headerValue));
} else if (headerName.equals("JMSReplyTo") && headerValue != null) {
- jmsMessage.setJMSReplyTo(ExchangeHelper.convertToType(exchange, Destination.class, headerValue));
+ if (exchange.getPattern().isOutCapable()) {
+ // only set the JMSReply if the Exchange supports Out
+ jmsMessage.setJMSReplyTo(ExchangeHelper.convertToType(exchange, Destination.class, headerValue));
+ } else {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Exchange is not out capable, Ignoring JMSReplyTo: " + headerValue);
+ }
+ }
} else if (headerName.equals("JMSType")) {
jmsMessage.setJMSType(ExchangeHelper.convertToType(exchange, String.class, headerValue));
} else if (LOG.isDebugEnabled()) {
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsRouteWithInOnlyTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsRouteWithInOnlyTest.java
new file mode 100644
index 0000000000000..2b7adad192f50
--- /dev/null
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsRouteWithInOnlyTest.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.jms;
+
+import javax.jms.ConnectionFactory;
+
+import org.apache.activemq.ActiveMQConnectionFactory;
+import org.apache.camel.CamelContext;
+import org.apache.camel.ContextTestSupport;
+import org.apache.camel.ExchangePattern;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.component.mock.MockEndpoint;
+import org.apache.camel.impl.JndiRegistry;
+import static org.apache.camel.component.jms.JmsComponent.jmsComponentClientAcknowledge;
+
+/**
+ * Unit test inspired by user forum
+ *
+ * @version $Revision$
+ */
+public class JmsRouteWithInOnlyTest extends ContextTestSupport {
+
+ protected String componentName = "activemq";
+
+ public void testSendOrder() throws Exception {
+ MockEndpoint inbox = getMockEndpoint("mock:inbox");
+ inbox.expectedBodiesReceived("Camel in Action");
+
+ MockEndpoint order = getMockEndpoint("mock:topic");
+ order.expectedBodiesReceived("Camel in Action");
+
+ Object out = template.requestBody("activemq:queue:inbox", "Camel in Action");
+ assertEquals("OK: Camel in Action", out);
+
+ assertMockEndpointsSatisfied();
+
+ // assert MEP
+ assertEquals(ExchangePattern.InOut, inbox.getReceivedExchanges().get(0).getPattern());
+ assertEquals(ExchangePattern.InOnly, order.getReceivedExchanges().get(0).getPattern());
+ }
+
+ @Override
+ protected JndiRegistry createRegistry() throws Exception {
+ JndiRegistry jndi = super.createRegistry();
+ jndi.bind("orderService", new MyOrderServiceBean());
+ return jndi;
+ }
+
+ protected CamelContext createCamelContext() throws Exception {
+ CamelContext camelContext = super.createCamelContext();
+
+ ConnectionFactory connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");
+ camelContext.addComponent(componentName, jmsComponentClientAcknowledge(connectionFactory));
+
+ return camelContext;
+ }
+
+ @Override
+ protected RouteBuilder createRouteBuilder() throws Exception {
+ return new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ from("activemq:queue:inbox")
+ .to("mock:inbox")
+ .inOnly("activemq:topic:order")
+ .beanRef("orderService", "handleOrder");
+
+ from("activemq:topic:order")
+ .to("mock:topic");
+ }
+ };
+ }
+
+ public static class MyOrderServiceBean {
+
+ public String handleOrder(String body) {
+ return "OK: " + body;
+ }
+
+ }
+}
|
48b37d99bff444562915a1378de7834de820ad93
|
hbase
|
HBASE-6529 With HFile v2, the region server will- always perform an extra copy of source files--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1372313 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
index 87a1c13f88d9..ab90034d2567 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
@@ -53,6 +53,7 @@
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.backup.HFileArchiver;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.Compression;
@@ -564,7 +565,8 @@ void bulkLoadHFile(String srcPathStr) throws IOException {
// Copy the file if it's on another filesystem
FileSystem srcFs = srcPath.getFileSystem(conf);
- if (!srcFs.equals(fs)) {
+ FileSystem desFs = fs instanceof HFileSystem ? ((HFileSystem)fs).getBackingFs() : fs;
+ if (!srcFs.equals(desFs)) {
LOG.info("Bulk-load file " + srcPath + " is on different filesystem than " +
"the destination store. Copying file over to destination filesystem.");
Path tmpPath = getTmpPath();
|
795fd180f7e758d79c1c37ade7b21a944a183c07
|
orientdb
|
Fixed issues with sharing. Added also first test- case on sharding--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/distributed/pom.xml b/distributed/pom.xml
index 1f0f089bfe8..e97400e1ab1 100644
--- a/distributed/pom.xml
+++ b/distributed/pom.xml
@@ -47,6 +47,12 @@
<version>${project.version}</version>
<scope>compile</scope>
</dependency>
+ <dependency>
+ <groupId>com.orientechnologies</groupId>
+ <artifactId>orientdb-graphdb</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
diff --git a/distributed/src/test/java/com/orientechnologies/orient/server/distributed/AbstractServerClusterGraphTest.java b/distributed/src/test/java/com/orientechnologies/orient/server/distributed/AbstractServerClusterGraphTest.java
new file mode 100644
index 00000000000..a2e394599c7
--- /dev/null
+++ b/distributed/src/test/java/com/orientechnologies/orient/server/distributed/AbstractServerClusterGraphTest.java
@@ -0,0 +1,268 @@
+///*
+// * Copyright 2010-2012 Luca Garulli (l.garulli(at)orientechnologies.com)
+// *
+// * Licensed under the Apache License, Version 2.0 (the "License");
+// * you may not use this file except in compliance with the License.
+// * You may obtain a copy of the License at
+// *
+// * http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing, software
+// * distributed under the License is distributed on an "AS IS" BASIS,
+// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// * See the License for the specific language governing permissions and
+// * limitations under the License.
+// */
+//
+//package com.orientechnologies.orient.server.distributed;
+//
+//import java.util.ArrayList;
+//import java.util.Date;
+//import java.util.Iterator;
+//import java.util.List;
+//import java.util.UUID;
+//import java.util.concurrent.Callable;
+//import java.util.concurrent.ExecutorService;
+//import java.util.concurrent.Executors;
+//import java.util.concurrent.Future;
+//import java.util.concurrent.TimeUnit;
+//
+//import junit.framework.Assert;
+//
+//import com.orientechnologies.orient.core.db.document.ODatabaseDocumentPool;
+//import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
+//import com.orientechnologies.orient.core.db.record.OIdentifiable;
+//import com.orientechnologies.orient.core.exception.OQueryParsingException;
+//import com.orientechnologies.orient.core.metadata.schema.OClass;
+//import com.orientechnologies.orient.core.metadata.schema.OClass.INDEX_TYPE;
+//import com.orientechnologies.orient.core.metadata.schema.OSchema;
+//import com.orientechnologies.orient.core.metadata.schema.OType;
+//import com.orientechnologies.orient.core.record.impl.ODocument;
+//import com.orientechnologies.orient.core.sql.OCommandSQL;
+//import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
+//import com.tinkerpop.blueprints.Direction;
+//import com.tinkerpop.blueprints.Edge;
+//import com.tinkerpop.blueprints.Vertex;
+//import com.tinkerpop.blueprints.impls.orient.OrientBaseGraph;
+//import com.tinkerpop.blueprints.impls.orient.OrientGraphFactory;
+//
+///**
+// * Test distributed TX
+// */
+//public abstract class AbstractServerClusterGraphTest extends AbstractServerClusterTest {
+// protected static final int delayWriter = 0;
+// protected static final int delayReader = 1000;
+// protected static final int writerCount = 5;
+// protected int count = 1000;
+// protected long beginInstances;
+//
+// class Writer implements Callable<Void> {
+// private final String databaseUrl;
+// private final OrientGraphFactory factory;
+// private int serverId;
+//
+// public Writer(final int iServerId, final String db) {
+// serverId = iServerId;
+// databaseUrl = db;
+// factory = new OrientGraphFactory(databaseUrl, "admin", "admin");
+// }
+//
+// @Override
+// public Void call() throws Exception {
+// String name = Integer.toString(serverId);
+// for (int i = 0; i < count; i++) {
+// final OrientBaseGraph graph = factory.getTx();
+//
+// if ((i + 1) % 100 == 0)
+// System.out.println("\nWriter " + graph.getRawGraph().getURL() + " managed " + (i + 1) + "/" + count + " records so far");
+//
+// Vertex shelve = graph.addVertex(null);
+// shelve.setProperty("EQUIP_TYPE", "Shelf");
+// Vertex card = graph.addVertex(null);
+// card.setProperty("EQUIP_TYPE", "Card");
+// graph.addEdge(null, shelve, card, "GEO");
+//
+// try {
+// Iterable<Vertex> vertices = graph.command(new OCommandSQL(queryForVertices.toString())).execute();
+// for (Vertex vertex : vertices) {
+// Iterator<Edge> egdeIterator = vertex.getEdges(Direction.OUT, "GEO").iterator();
+// while (egdeIterator.hasNext()) {
+// Edge e = egdeIterator.next();
+// graph.removeEdge(e);
+// }
+// }
+//
+// Thread.sleep(delayWriter);
+//
+// } catch (InterruptedException e) {
+// System.out.println("Writer received interrupt (db=" + database.getURL());
+// Thread.currentThread().interrupt();
+// break;
+// } catch (Exception e) {
+// System.out.println("Writer received exception (db=" + database.getURL());
+// e.printStackTrace();
+// break;
+// } finally {
+// database.close();
+// }
+// }
+//
+// System.out.println("\nWriter " + name + " END");
+// return null;
+// }
+//
+// private ODocument createRecord(ODatabaseDocumentTx database, int i) {
+// final int uniqueId = count * serverId + i;
+//
+// ODocument person = new ODocument("Person").fields("id", UUID.randomUUID().toString(), "name", "Billy" + uniqueId, "surname",
+// "Mayes" + uniqueId, "birthday", new Date(), "children", uniqueId);
+// database.save(person);
+// return person;
+// }
+//
+// private void updateRecord(ODatabaseDocumentTx database, ODocument doc) {
+// doc.field("updated", true);
+// doc.save();
+// }
+//
+// private void checkRecord(ODatabaseDocumentTx database, ODocument doc) {
+// doc.reload();
+// Assert.assertEquals(doc.field("updated"), Boolean.TRUE);
+// }
+// }
+//
+// class Reader implements Callable<Void> {
+// private final String databaseUrl;
+//
+// public Reader(final String db) {
+// databaseUrl = db;
+// }
+//
+// @Override
+// public Void call() throws Exception {
+// try {
+// while (!Thread.interrupted()) {
+// try {
+// printStats(databaseUrl);
+// Thread.sleep(delayReader);
+//
+// } catch (Exception e) {
+// break;
+// }
+// }
+//
+// } finally {
+// printStats(databaseUrl);
+// }
+// return null;
+// }
+// }
+//
+// public String getDatabaseName() {
+// return "distributed";
+// }
+//
+// public void executeTest() throws Exception {
+//
+// ODatabaseDocumentTx database = ODatabaseDocumentPool.global().acquire(getDatabaseURL(serverInstance.get(0)), "admin", "admin");
+// try {
+// List<ODocument> result = database.query(new OSQLSynchQuery<OIdentifiable>("select count(*) from Person"));
+// beginInstances = result.get(0).field("count");
+// } finally {
+// database.close();
+// }
+//
+// System.out.println("Creating Writers and Readers threads...");
+//
+// final ExecutorService executor = Executors.newCachedThreadPool();
+//
+// int i = 0;
+// List<Callable<Void>> workers = new ArrayList<Callable<Void>>();
+// for (ServerRun server : serverInstance) {
+// for (int j = 0; j < writerCount; j++) {
+// Writer writer = new Writer(i++, getDatabaseURL(server));
+// workers.add(writer);
+// }
+//
+// Reader reader = new Reader(getDatabaseURL(server));
+// workers.add(reader);
+// }
+//
+// List<Future<Void>> futures = executor.invokeAll(workers);
+//
+// System.out.println("Threads started, waiting for the end");
+//
+// executor.shutdown();
+// Assert.assertTrue(executor.awaitTermination(10, TimeUnit.MINUTES));
+//
+// for (Future<Void> future : futures) {
+// future.get();
+// }
+//
+// System.out.println("All threads have finished, shutting down server instances");
+//
+// for (ServerRun server : serverInstance) {
+// printStats(getDatabaseURL(server));
+// }
+// }
+//
+// protected abstract String getDatabaseURL(ServerRun server);
+//
+// /**
+// * Event called right after the database has been created and right before to be replicated to the X servers
+// *
+// * @param db
+// * Current database
+// */
+// protected void onAfterDatabaseCreation(final ODatabaseDocumentTx db) {
+// System.out.println("Creating database schema...");
+//
+// // CREATE BASIC SCHEMA
+// OClass personClass = db.getMetadata().getSchema().createClass("Person");
+// personClass.createProperty("id", OType.STRING);
+// personClass.createProperty("name", OType.STRING);
+// personClass.createProperty("birthday", OType.DATE);
+// personClass.createProperty("children", OType.INTEGER);
+//
+// final OSchema schema = db.getMetadata().getSchema();
+// OClass person = schema.getClass("Person");
+// person.createIndex("Person.name", INDEX_TYPE.UNIQUE, "name");
+//
+// OClass customer = schema.createClass("Customer", person);
+// customer.createProperty("totalSold", OType.DECIMAL);
+//
+// OClass provider = schema.createClass("Provider", person);
+// provider.createProperty("totalPurchased", OType.DECIMAL);
+//
+// new ODocument("Customer").fields("name", "Jay", "surname", "Miner").save();
+// new ODocument("Customer").fields("name", "Luke", "surname", "Skywalker").save();
+// new ODocument("Provider").fields("name", "Yoda", "surname", "Nothing").save();
+// }
+//
+// private void printStats(final String databaseUrl) {
+// final ODatabaseDocumentTx database = ODatabaseDocumentPool.global().acquire(databaseUrl, "admin", "admin");
+// try {
+// List<ODocument> result = database.query(new OSQLSynchQuery<OIdentifiable>("select count(*) from Person"));
+//
+// final String name = database.getURL();
+//
+// System.out.println("\nReader " + name + " sql count: " + result.get(0) + " counting class: " + database.countClass("Person")
+// + " counting cluster: " + database.countClusterElements("Person"));
+//
+// if (database.getMetadata().getSchema().existsClass("ODistributedConflict"))
+// try {
+// List<ODocument> conflicts = database
+// .query(new OSQLSynchQuery<OIdentifiable>("select count(*) from ODistributedConflict"));
+// long totalConflicts = conflicts.get(0).field("count");
+// Assert.assertEquals(0l, totalConflicts);
+// System.out.println("\nReader " + name + " conflicts: " + totalConflicts);
+// } catch (OQueryParsingException e) {
+// // IGNORE IT
+// }
+//
+// } finally {
+// database.close();
+// }
+//
+// }
+//}
diff --git a/distributed/src/test/java/com/orientechnologies/orient/server/distributed/AbstractServerClusterTest.java b/distributed/src/test/java/com/orientechnologies/orient/server/distributed/AbstractServerClusterTest.java
index ca13222a8ff..659bc01a8d6 100755
--- a/distributed/src/test/java/com/orientechnologies/orient/server/distributed/AbstractServerClusterTest.java
+++ b/distributed/src/test/java/com/orientechnologies/orient/server/distributed/AbstractServerClusterTest.java
@@ -15,17 +15,16 @@
*/
package com.orientechnologies.orient.server.distributed;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.junit.Assert;
-
import com.hazelcast.core.Hazelcast;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.record.impl.ODocument;
+import org.junit.Assert;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
/**
* Test class that creates and executes distributed operations against a cluster of servers created in the same JVM.
@@ -40,18 +39,44 @@ public abstract class AbstractServerClusterTest {
protected AbstractServerClusterTest() {
}
- protected abstract String getDatabaseName();
+ @SuppressWarnings("unchecked")
+ public static void main(final String[] args) throws Exception {
+ Class<? extends AbstractServerClusterTest> testClass = null;
+ String command = null;
+ int servers = 2;
- /**
- * Event called right after the database has been created and right before to be replicated to the X servers
- *
- * @param db
- * Current database
- */
- protected void onAfterDatabaseCreation(final ODatabaseDocumentTx db) {
+ if (args.length > 0)
+ testClass = (Class<? extends AbstractServerClusterTest>) Class.forName(args[0]);
+ else
+ syntaxError();
+
+ if (args.length > 1)
+ command = args[1];
+ else
+ syntaxError();
+
+ if (args.length > 2)
+ servers = Integer.parseInt(args[2]);
+
+ final AbstractServerClusterTest main = testClass.newInstance();
+ main.init(servers);
+
+ if (command.equals("prepare"))
+ main.prepare(true);
+ else if (command.equals("execute"))
+ main.execute();
+ else if (command.equals("prepare+execute")) {
+ main.prepare(true);
+ main.execute();
+ } else
+ System.out.println("Usage: prepare, execute or prepare+execute ...");
}
- protected abstract void executeTest() throws Exception;
+ private static void syntaxError() {
+ System.err
+ .println("Syntax error. Usage: <class> <operation> [<servers>]\nWhere <operation> can be: prepare|execute|prepare+execute");
+ System.exit(1);
+ }
public void init(final int servers) {
Orient.setRegisterDatabaseByPath(true);
@@ -63,7 +88,7 @@ public void execute() throws Exception {
System.out.println("Starting test against " + serverInstance.size() + " server nodes...");
for (ServerRun server : serverInstance) {
- server.startServer("orientdb-dserver-config-" + server.getServerId() + ".xml");
+ server.startServer(getDistributedServerConfiguration(server));
try {
Thread.sleep(delayServerStartup * serverInstance.size());
} catch (InterruptedException e) {
@@ -80,7 +105,6 @@ public void execute() throws Exception {
for (ServerRun server : serverInstance) {
final ODocument cfg = server.getServerInstance().getDistributedManager().getClusterConfiguration();
Assert.assertNotNull(cfg);
- // Assert.assertEquals(((Collection<?>) cfg.field("members")).size(), serverInstance.size());
}
System.out.println("Executing test...");
@@ -88,6 +112,7 @@ public void execute() throws Exception {
try {
executeTest();
} finally {
+ System.out.println("Shutting down nodes...");
for (ServerRun server : serverInstance)
server.shutdownServer();
Hazelcast.shutdownAll();
@@ -95,7 +120,25 @@ public void execute() throws Exception {
}
}
- protected void prepare() throws IOException {
+ protected abstract String getDatabaseName();
+
+ /**
+ * Event called right after the database has been created and right before to be replicated to the X servers
+ *
+ * @param db
+ * Current database
+ */
+ protected void onAfterDatabaseCreation(final ODatabaseDocumentTx db) {
+ }
+
+ protected abstract void executeTest() throws Exception;
+
+ /**
+ * Create the database on first node only
+ *
+ * @throws IOException
+ */
+ protected void prepare(final boolean iCopyDatabaseToNodes) throws IOException {
// CREATE THE DATABASE
final Iterator<ServerRun> it = serverInstance.iterator();
final ServerRun master = it.next();
@@ -110,46 +153,16 @@ protected void prepare() throws IOException {
// COPY DATABASE TO OTHER SERVERS
while (it.hasNext()) {
final ServerRun replicaSrv = it.next();
- master.copyDatabase(getDatabaseName(), replicaSrv.getDatabasePath(getDatabaseName()));
- }
- }
- @SuppressWarnings("unchecked")
- public static void main(final String[] args) throws Exception {
- Class<? extends AbstractServerClusterTest> testClass = null;
- String command = null;
- int servers = 2;
+ replicaSrv.deleteNode();
- if (args.length > 0)
- testClass = (Class<? extends AbstractServerClusterTest>) Class.forName(args[0]);
- else
- syntaxError();
-
- if (args.length > 1)
- command = args[1];
- else
- syntaxError();
-
- if (args.length > 2)
- servers = Integer.parseInt(args[2]);
-
- final AbstractServerClusterTest main = testClass.newInstance();
- main.init(servers);
-
- if (command.equals("prepare"))
- main.prepare();
- else if (command.equals("execute"))
- main.execute();
- else if (command.equals("prepare+execute")) {
- main.prepare();
- main.execute();
- } else
- System.out.println("Usage: prepare, execute or prepare+execute ...");
+ if (iCopyDatabaseToNodes)
+ master.copyDatabase(getDatabaseName(), replicaSrv.getDatabasePath(getDatabaseName()));
+ }
}
- private static void syntaxError() {
- System.err
- .println("Syntax error. Usage: <class> <operation> [<servers>]\nWhere <operation> can be: prepare|execute|prepare+execute");
- System.exit(1);
+ protected String getDistributedServerConfiguration(final ServerRun server) {
+ return "orientdb-dserver-config-" + server.getServerId() + ".xml";
}
+
}
diff --git a/distributed/src/test/java/com/orientechnologies/orient/server/distributed/ServerRun.java b/distributed/src/test/java/com/orientechnologies/orient/server/distributed/ServerRun.java
index 8a0dd2acd19..cee2742d9a8 100644
--- a/distributed/src/test/java/com/orientechnologies/orient/server/distributed/ServerRun.java
+++ b/distributed/src/test/java/com/orientechnologies/orient/server/distributed/ServerRun.java
@@ -31,8 +31,8 @@
* @author Luca Garulli (l.garulli--at--orientechnologies.com)
*/
public class ServerRun {
- protected String rootPath;
protected final String serverId;
+ protected String rootPath;
protected OServer server;
public ServerRun(final String iRootPath, final String serverId) {
@@ -40,6 +40,30 @@ public ServerRun(final String iRootPath, final String serverId) {
this.serverId = serverId;
}
+ public static String getServerHome(final String iServerId) {
+ return "target/server" + iServerId;
+ }
+
+ public static String getDatabasePath(final String iServerId, final String iDatabaseName) {
+ return getServerHome(iServerId) + "/databases/" + iDatabaseName;
+ }
+
+ public OServer getServerInstance() {
+ return server;
+ }
+
+ public String getServerId() {
+ return serverId;
+ }
+
+ public String getBinaryProtocolAddress() {
+ return server.getListenerByProtocol(ONetworkProtocolBinary.class).getListeningAddress();
+ }
+
+ public void deleteNode() {
+ OFileUtils.deleteRecursively(new File(getServerHome()));
+ }
+
protected ODatabaseDocumentTx createDatabase(final String iName) {
OGlobalConfiguration.STORAGE_KEEP_OPEN.setValue(false);
@@ -67,22 +91,15 @@ protected void copyDatabase(final String iDatabaseName, final String iDestinatio
OFileUtils.copyDirectory(new File(getDatabasePath(iDatabaseName)), new File(iDestinationDirectory));
}
- public OServer getServerInstance() {
- return server;
- }
-
- public String getServerId() {
- return serverId;
- }
-
- protected OServer startServer(final String iConfigFile) throws Exception, InstantiationException, IllegalAccessException,
+ protected OServer startServer(final String iServerConfigFile) throws Exception, InstantiationException, IllegalAccessException,
ClassNotFoundException, InvocationTargetException, NoSuchMethodException, IOException {
System.out.println("Starting server " + serverId + " from " + getServerHome() + "...");
System.setProperty("ORIENTDB_HOME", getServerHome());
server = new OServer();
- server.startup(getClass().getClassLoader().getResourceAsStream(iConfigFile));
+ server.setServerRootDirectory(getServerHome());
+ server.startup(getClass().getClassLoader().getResourceAsStream(iServerConfigFile));
server.activate();
return server;
}
@@ -100,15 +117,4 @@ protected String getDatabasePath(final String iDatabaseName) {
return getDatabasePath(serverId, iDatabaseName);
}
- public String getBinaryProtocolAddress() {
- return server.getListenerByProtocol(ONetworkProtocolBinary.class).getListeningAddress();
- }
-
- public static String getServerHome(final String iServerId) {
- return "target/server" + iServerId;
- }
-
- public static String getDatabasePath(final String iServerId, final String iDatabaseName) {
- return getServerHome(iServerId) + "/databases/" + iDatabaseName;
- }
}
diff --git a/distributed/src/test/java/com/orientechnologies/orient/server/distributed/TestDistributed.java b/distributed/src/test/java/com/orientechnologies/orient/server/distributed/TestDistributed.java
deleted file mode 100644
index b964f7f9703..00000000000
--- a/distributed/src/test/java/com/orientechnologies/orient/server/distributed/TestDistributed.java
+++ /dev/null
@@ -1,99 +0,0 @@
-//package com.orientechnologies.orient.server.distributed;
-//
-//import com.hazelcast.core.Hazelcast;
-//import com.hazelcast.core.HazelcastInstance;
-//import com.orientechnologies.common.io.OFileUtils;
-//import com.orientechnologies.orient.core.record.impl.ODocument;
-//import com.orientechnologies.orient.server.OServer;
-//import com.orientechnologies.orient.server.OServerMain;
-//import com.orientechnologies.orient.server.hazelcast.OHazelcastPlugin;
-//import com.tinkerpop.blueprints.impls.orient.OrientGraphFactory;
-//import com.tinkerpop.blueprints.impls.orient.OrientGraphNoTx;
-//import org.junit.After;
-//import org.junit.Before;
-//import org.junit.Test;
-//
-//import java.io.File;
-//import java.io.FileNotFoundException;
-//import java.util.ArrayList;
-//import java.util.Collections;
-//
-//public class TestDistributed {
-//
-// private OServer server;
-//
-// public static class StandaloneHazelcastPlugin extends OHazelcastPlugin {
-//
-// @Override
-// protected HazelcastInstance configureHazelcast() throws FileNotFoundException {
-// return Hazelcast.newHazelcastInstance();
-// }
-//
-// @Override
-// protected ODocument loadDatabaseConfiguration(String iDatabaseName, File file) {
-// ODocument doc = new ODocument();
-// doc.field("replication", true)
-// .field("autoDeploy", true)
-// .field("hotAlignment", true)
-// .field("resyncEvery", 15)
-// .field(
-// "clusters",
-// new ODocument()
-// .field("internal", new ODocument().field("replication", false))
-// .field("index", new ODocument().field("replication", false))
-// .field(
-// "*",
-// new ODocument()
-// .field("replication", true)
-// .field("readQuorum", 1)
-// .field("writeQuorum", 1)
-// .field("failureAvailableNodesLessQuorum", false)
-// .field("readYourWrites", true)
-// .field(
-// "partitioning",
-// new ODocument()
-// .field("strategy", "round-robin")
-// .field("default", 0)
-// .field("partitions", Collections.singletonList(new ArrayList<String>(Collections.singletonList("<NEW_NODE>")))))));
-//
-// return doc;
-// }
-// }
-//
-// @Before
-// public void setUp() throws Exception {
-// File target = new File("target/testdb");
-// OFileUtils.deleteRecursively(target);
-// target.mkdirs();
-//
-// server = OServerMain.create();
-// server
-// .startup("<orient-server>"
-// + "<handlers>"
-// + "<handler class=\""
-// + StandaloneHazelcastPlugin.class.getName()
-// + "\">"
-// + "<parameters>"
-// + "<parameter name=\"enabled\" value=\"true\" />"
-// + "<parameter name=\"sharding.strategy.round-robin\" value=\"com.orientechnologies.orient.server.hazelcast.sharding.strategy.ORoundRobinPartitioninStrategy\" />"
-// + "</parameters>"
-// + "</handler>"
-// + "</handlers>"
-// + "<network><protocols></protocols><listeners></listeners><cluster></cluster></network><storages></storages><users></users>"
-// + "<properties><entry name=\"server.database.path\" value=\"target/\"/></properties>" + "</orient-server>");
-// server.activate();
-// }
-//
-// @After
-// public void tearDown() {
-// server.shutdown();
-// }
-//
-// @Test
-// public void testCreateClass() {
-// OrientGraphFactory factory = new OrientGraphFactory("plocal:target/testdb");
-// OrientGraphNoTx graph = factory.getNoTx();
-//
-// graph.addVertex(null);
-// }
-//}
diff --git a/distributed/src/test/java/com/orientechnologies/orient/server/distributed/TestSharding.java b/distributed/src/test/java/com/orientechnologies/orient/server/distributed/TestSharding.java
new file mode 100644
index 00000000000..e3c415bc60f
--- /dev/null
+++ b/distributed/src/test/java/com/orientechnologies/orient/server/distributed/TestSharding.java
@@ -0,0 +1,69 @@
+package com.orientechnologies.orient.server.distributed;
+
+import junit.framework.Assert;
+
+import org.junit.Test;
+
+import com.orientechnologies.orient.core.sql.OCommandSQL;
+import com.tinkerpop.blueprints.impls.orient.OrientGraphFactory;
+import com.tinkerpop.blueprints.impls.orient.OrientGraphNoTx;
+import com.tinkerpop.blueprints.impls.orient.OrientVertex;
+import com.tinkerpop.blueprints.impls.orient.OrientVertexType;
+
+public class TestSharding extends AbstractServerClusterTest {
+
+ protected OrientVertex[] vertices;
+
+ @Test
+ public void test() throws Exception {
+ init(3);
+ prepare(false);
+ execute();
+ }
+
+ @Override
+ protected String getDatabaseName() {
+ return "sharding";
+ }
+
+ @Override
+ protected String getDistributedServerConfiguration(final ServerRun server) {
+ return "sharded-dserver-config-" + server.getServerId() + ".xml";
+ }
+
+ @Override
+ protected void executeTest() throws Exception {
+ OrientGraphFactory factory = new OrientGraphFactory("plocal:target/server0/databases/" + getDatabaseName());
+ OrientGraphNoTx graph = factory.getNoTx();
+
+ try {
+ final OrientVertexType clientType = graph.createVertexType("Client");
+
+ vertices = new OrientVertex[serverInstance.size()];
+ for (int i = 0; i < vertices.length; ++i) {
+ clientType.addCluster("client_" + i);
+
+ vertices[i] = graph.addVertex("class:Client,cluster:client_" + i);
+ vertices[i].setProperty("name", "shard_" + i);
+ }
+ } finally {
+ graph.shutdown();
+ }
+
+ for (int i = 0; i < vertices.length; ++i) {
+ OrientGraphFactory f = new OrientGraphFactory("plocal:target/server" + i + "/databases/" + getDatabaseName());
+ OrientGraphNoTx g = f.getNoTx();
+ try {
+ Iterable<OrientVertex> result = g.command(new OCommandSQL("select from Client")).execute();
+ Assert.assertTrue(result.iterator().hasNext());
+
+ OrientVertex v = result.iterator().next();
+
+ Assert.assertEquals(v.getProperty("name"), "shard_" + i);
+
+ } finally {
+ graph.shutdown();
+ }
+ }
+ }
+}
diff --git a/distributed/src/test/resources/sharded-distributed-db-config.json b/distributed/src/test/resources/sharded-distributed-db-config.json
new file mode 100644
index 00000000000..a7b381e9498
--- /dev/null
+++ b/distributed/src/test/resources/sharded-distributed-db-config.json
@@ -0,0 +1,58 @@
+{
+ "replication": true,
+ "autoDeploy": true,
+ "hotAlignment": true,
+ "resyncEvery": 15,
+ "clusters": {
+ "internal": {
+ "replication": false
+ },
+ "index": {
+ "replication": false
+ },
+ "client_0": {
+ "replication": false,
+ "partitioning": {
+ "strategy": "round-robin",
+ "default": 0,
+ "partitions": [
+ [ "europe" ]
+ ]
+ }
+ },
+ "client_1": {
+ "replication": false,
+ "partitioning": {
+ "strategy": "round-robin",
+ "default": 0,
+ "partitions": [
+ [ "usa" ]
+ ]
+ }
+ },
+ "client_2": {
+ "replication": false,
+ "partitioning": {
+ "strategy": "round-robin",
+ "default": 0,
+ "partitions": [
+ [ "asia" ]
+ ]
+ }
+ },
+ "*": {
+ "replication": true,
+ "readQuorum": 1,
+ "writeQuorum": 2,
+ "failureAvailableNodesLessQuorum": false,
+ "readYourWrites": true,
+ "partitioning": {
+ "strategy": "round-robin",
+ "default": 0,
+ "partitions": [
+ [ "<NEW_NODE>" ]
+ ]
+ }
+ }
+ }
+}
diff --git a/distributed/src/test/resources/sharded-dserver-config-0.xml b/distributed/src/test/resources/sharded-dserver-config-0.xml
new file mode 100755
index 00000000000..0e0c1ed1453
--- /dev/null
+++ b/distributed/src/test/resources/sharded-dserver-config-0.xml
@@ -0,0 +1,106 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<orient-server>
+ <handlers>
+ <handler
+ class="com.orientechnologies.orient.server.hazelcast.OHazelcastPlugin">
+ <parameters>
+ <parameter value="europe" name="nodeName" />
+ <parameter value="true" name="enabled" />
+ <parameter value="src/test/resources/sharded-hazelcast.xml"
+ name="configuration.hazelcast" />
+ <parameter name="conflict.resolver.impl"
+ value="com.orientechnologies.orient.server.distributed.conflict.ODefaultReplicationConflictResolver" />
+ <parameter name="configuration.db.default"
+ value="src/test/resources/sharded-distributed-db-config.json" />
+
+ <parameter name="sharding.strategy.round-robin"
+ value="com.orientechnologies.orient.server.hazelcast.sharding.strategy.ORoundRobinPartitioninStrategy" />
+ </parameters>
+ </handler>
+ <handler
+ class="com.orientechnologies.orient.server.handler.OAutomaticBackup">
+ <parameters>
+ <parameter value="false" name="enabled" />
+ <parameter value="4h" name="delay" />
+ <parameter value="backup" name="target.directory" />
+ <parameter value="${DBNAME}-${DATE:yyyyMMddHHmmss}.json"
+ name="target.fileName" />
+ <parameter value="" name="db.include" />
+ <parameter value="" name="db.exclude" />
+ </parameters>
+ </handler>
+ <handler
+ class="com.orientechnologies.orient.server.handler.OServerSideScriptInterpreter">
+ <parameters>
+ <parameter value="false" name="enabled" />
+ </parameters>
+ </handler>
+ </handlers>
+ <network>
+ <protocols>
+ <protocol
+ implementation="com.orientechnologies.orient.server.network.protocol.binary.ONetworkProtocolBinary"
+ name="binary" />
+ <protocol
+ implementation="com.orientechnologies.orient.server.network.protocol.http.ONetworkProtocolHttpDb"
+ name="http" />
+ </protocols>
+ <listeners>
+ <listener protocol="binary" port-range="2424-2430"
+ ip-address="0.0.0.0" />
+ <listener protocol="http" port-range="2480-2490" ip-address="0.0.0.0">
+ <parameters>
+ <!-- Connection's custom parameters. If not specified the global configuration
+ will be taken -->
+ <parameter name="network.http.charset" value="utf-8" />
+ <!-- Define additional HTTP headers to always send as response -->
+ <!-- Allow cross-site scripting -->
+ <!-- parameter name="network.http.additionalResponseHeaders" value="Access-Control-Allow-Origin:
+ *;Access-Control-Allow-Credentials: true" / -->
+ </parameters>
+ <commands>
+ <command
+ implementation="com.orientechnologies.orient.server.network.protocol.http.command.get.OServerCommandGetStaticContent"
+ pattern="GET|www GET|studio/ GET| GET|*.htm GET|*.html GET|*.xml GET|*.jpeg GET|*.jpg GET|*.png GET|*.gif GET|*.js GET|*.css GET|*.swf GET|*.ico GET|*.txt GET|*.otf GET|*.pjs GET|*.svg">
+ <parameters>
+ <entry
+ value="Cache-Control: no-cache, no-store, max-age=0, must-revalidate\r\nPragma: no-cache"
+ name="http.cache:*.htm *.html" />
+ <entry value="Cache-Control: max-age=120" name="http.cache:default" />
+ </parameters>
+ </command>
+ </commands>
+ </listener>
+ </listeners>
+ </network>
+ <storages>
+ </storages>
+ <users>
+ <user resources="*" password="test" name="root" />
+ <user resources="connect,server.listDatabases" password="guest"
+ name="guest" />
+ <user resources="database.passthrough"
+ password="79498491C4D4F1360816D003E2004BC04606AA1C31B1A0E3BCF091A30EFDAB7D"
+ name="replicator" />
+ </users>
+ <properties>
+ <!-- DATABASE POOL: size min/max -->
+ <entry name="db.pool.min" value="1" />
+ <entry name="db.pool.max" value="20" />
+
+ <!-- LEVEL1 AND 2 CACHE: enable/disable and set the size as number of entries -->
+ <entry name="cache.level1.enabled" value="false" />
+ <entry name="cache.level1.size" value="1000" />
+ <entry name="cache.level2.enabled" value="true" />
+ <entry name="cache.level2.size" value="1000" />
+
+ <!-- PROFILER: configures the profiler as <seconds-for-snapshot>,<archive-snapshot-size>,<summary-size> -->
+ <entry name="profiler.enabled" value="true" />
+ <!-- <entry name="profiler.config" value="30,10,10" /> -->
+
+ <!-- LOG: enable/Disable logging. Levels are: finer, fine, finest, info,
+ warning -->
+ <entry name="log.console.level" value="info" />
+ <entry name="log.file.level" value="fine" />
+ </properties>
+</orient-server>
diff --git a/distributed/src/test/resources/sharded-dserver-config-1.xml b/distributed/src/test/resources/sharded-dserver-config-1.xml
new file mode 100755
index 00000000000..6454f9109b8
--- /dev/null
+++ b/distributed/src/test/resources/sharded-dserver-config-1.xml
@@ -0,0 +1,106 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<orient-server>
+ <handlers>
+ <handler
+ class="com.orientechnologies.orient.server.hazelcast.OHazelcastPlugin">
+ <parameters>
+ <parameter value="usa" name="nodeName" />
+ <parameter value="true" name="enabled" />
+ <parameter value="src/test/resources/sharded-hazelcast.xml"
+ name="configuration.hazelcast" />
+ <parameter name="conflict.resolver.impl"
+ value="com.orientechnologies.orient.server.distributed.conflict.ODefaultReplicationConflictResolver" />
+ <parameter name="configuration.db.default"
+ value="src/test/resources/sharded-distributed-db-config.json" />
+
+ <parameter name="sharding.strategy.round-robin"
+ value="com.orientechnologies.orient.server.hazelcast.sharding.strategy.ORoundRobinPartitioninStrategy" />
+ </parameters>
+ </handler>
+ <handler
+ class="com.orientechnologies.orient.server.handler.OAutomaticBackup">
+ <parameters>
+ <parameter value="false" name="enabled" />
+ <parameter value="4h" name="delay" />
+ <parameter value="backup" name="target.directory" />
+ <parameter value="${DBNAME}-${DATE:yyyyMMddHHmmss}.json"
+ name="target.fileName" />
+ <parameter value="" name="db.include" />
+ <parameter value="" name="db.exclude" />
+ </parameters>
+ </handler>
+ <handler
+ class="com.orientechnologies.orient.server.handler.OServerSideScriptInterpreter">
+ <parameters>
+ <parameter value="false" name="enabled" />
+ </parameters>
+ </handler>
+ </handlers>
+ <network>
+ <protocols>
+ <protocol
+ implementation="com.orientechnologies.orient.server.network.protocol.binary.ONetworkProtocolBinary"
+ name="binary" />
+ <protocol
+ implementation="com.orientechnologies.orient.server.network.protocol.http.ONetworkProtocolHttpDb"
+ name="http" />
+ </protocols>
+ <listeners>
+ <listener protocol="binary" port-range="2424-2430"
+ ip-address="0.0.0.0" />
+ <listener protocol="http" port-range="2480-2490" ip-address="0.0.0.0">
+ <parameters>
+ <!-- Connection's custom parameters. If not specified the global configuration
+ will be taken -->
+ <parameter name="network.http.charset" value="utf-8" />
+ <!-- Define additional HTTP headers to always send as response -->
+ <!-- Allow cross-site scripting -->
+ <!-- parameter name="network.http.additionalResponseHeaders" value="Access-Control-Allow-Origin:
+ *;Access-Control-Allow-Credentials: true" / -->
+ </parameters>
+ <commands>
+ <command
+ implementation="com.orientechnologies.orient.server.network.protocol.http.command.get.OServerCommandGetStaticContent"
+ pattern="GET|www GET|studio/ GET| GET|*.htm GET|*.html GET|*.xml GET|*.jpeg GET|*.jpg GET|*.png GET|*.gif GET|*.js GET|*.css GET|*.swf GET|*.ico GET|*.txt GET|*.otf GET|*.pjs GET|*.svg">
+ <parameters>
+ <entry
+ value="Cache-Control: no-cache, no-store, max-age=0, must-revalidate\r\nPragma: no-cache"
+ name="http.cache:*.htm *.html" />
+ <entry value="Cache-Control: max-age=120" name="http.cache:default" />
+ </parameters>
+ </command>
+ </commands>
+ </listener>
+ </listeners>
+ </network>
+ <storages>
+ </storages>
+ <users>
+ <user resources="*" password="test" name="root" />
+ <user resources="connect,server.listDatabases" password="guest"
+ name="guest" />
+ <user resources="database.passthrough"
+ password="79498491C4D4F1360816D003E2004BC04606AA1C31B1A0E3BCF091A30EFDAB7D"
+ name="replicator" />
+ </users>
+ <properties>
+ <!-- DATABASE POOL: size min/max -->
+ <entry name="db.pool.min" value="1" />
+ <entry name="db.pool.max" value="20" />
+
+ <!-- LEVEL1 AND 2 CACHE: enable/disable and set the size as number of entries -->
+ <entry name="cache.level1.enabled" value="false" />
+ <entry name="cache.level1.size" value="1000" />
+ <entry name="cache.level2.enabled" value="true" />
+ <entry name="cache.level2.size" value="1000" />
+
+ <!-- PROFILER: configures the profiler as <seconds-for-snapshot>,<archive-snapshot-size>,<summary-size> -->
+ <entry name="profiler.enabled" value="true" />
+ <!-- <entry name="profiler.config" value="30,10,10" /> -->
+
+ <!-- LOG: enable/Disable logging. Levels are: finer, fine, finest, info,
+ warning -->
+ <entry name="log.console.level" value="info" />
+ <entry name="log.file.level" value="fine" />
+ </properties>
+</orient-server>
diff --git a/distributed/src/test/resources/sharded-dserver-config-2.xml b/distributed/src/test/resources/sharded-dserver-config-2.xml
new file mode 100755
index 00000000000..92c92f707ba
--- /dev/null
+++ b/distributed/src/test/resources/sharded-dserver-config-2.xml
@@ -0,0 +1,106 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<orient-server>
+ <handlers>
+ <handler
+ class="com.orientechnologies.orient.server.hazelcast.OHazelcastPlugin">
+ <parameters>
+ <parameter value="asia" name="nodeName" />
+ <parameter value="true" name="enabled" />
+ <parameter value="src/test/resources/sharded-hazelcast.xml"
+ name="configuration.hazelcast" />
+ <parameter name="conflict.resolver.impl"
+ value="com.orientechnologies.orient.server.distributed.conflict.ODefaultReplicationConflictResolver" />
+ <parameter name="configuration.db.default"
+ value="src/test/resources/sharded-distributed-db-config.json" />
+
+ <parameter name="sharding.strategy.round-robin"
+ value="com.orientechnologies.orient.server.hazelcast.sharding.strategy.ORoundRobinPartitioninStrategy" />
+ </parameters>
+ </handler>
+ <handler
+ class="com.orientechnologies.orient.server.handler.OAutomaticBackup">
+ <parameters>
+ <parameter value="false" name="enabled" />
+ <parameter value="4h" name="delay" />
+ <parameter value="backup" name="target.directory" />
+ <parameter value="${DBNAME}-${DATE:yyyyMMddHHmmss}.json"
+ name="target.fileName" />
+ <parameter value="" name="db.include" />
+ <parameter value="" name="db.exclude" />
+ </parameters>
+ </handler>
+ <handler
+ class="com.orientechnologies.orient.server.handler.OServerSideScriptInterpreter">
+ <parameters>
+ <parameter value="false" name="enabled" />
+ </parameters>
+ </handler>
+ </handlers>
+ <network>
+ <protocols>
+ <protocol
+ implementation="com.orientechnologies.orient.server.network.protocol.binary.ONetworkProtocolBinary"
+ name="binary" />
+ <protocol
+ implementation="com.orientechnologies.orient.server.network.protocol.http.ONetworkProtocolHttpDb"
+ name="http" />
+ </protocols>
+ <listeners>
+ <listener protocol="binary" port-range="2424-2430"
+ ip-address="0.0.0.0" />
+ <listener protocol="http" port-range="2480-2490" ip-address="0.0.0.0">
+ <parameters>
+ <!-- Connection's custom parameters. If not specified the global configuration
+ will be taken -->
+ <parameter name="network.http.charset" value="utf-8" />
+ <!-- Define additional HTTP headers to always send as response -->
+ <!-- Allow cross-site scripting -->
+ <!-- parameter name="network.http.additionalResponseHeaders" value="Access-Control-Allow-Origin:
+ *;Access-Control-Allow-Credentials: true" / -->
+ </parameters>
+ <commands>
+ <command
+ implementation="com.orientechnologies.orient.server.network.protocol.http.command.get.OServerCommandGetStaticContent"
+ pattern="GET|www GET|studio/ GET| GET|*.htm GET|*.html GET|*.xml GET|*.jpeg GET|*.jpg GET|*.png GET|*.gif GET|*.js GET|*.css GET|*.swf GET|*.ico GET|*.txt GET|*.otf GET|*.pjs GET|*.svg">
+ <parameters>
+ <entry
+ value="Cache-Control: no-cache, no-store, max-age=0, must-revalidate\r\nPragma: no-cache"
+ name="http.cache:*.htm *.html" />
+ <entry value="Cache-Control: max-age=120" name="http.cache:default" />
+ </parameters>
+ </command>
+ </commands>
+ </listener>
+ </listeners>
+ </network>
+ <storages>
+ </storages>
+ <users>
+ <user resources="*" password="test" name="root" />
+ <user resources="connect,server.listDatabases" password="guest"
+ name="guest" />
+ <user resources="database.passthrough"
+ password="79498491C4D4F1360816D003E2004BC04606AA1C31B1A0E3BCF091A30EFDAB7D"
+ name="replicator" />
+ </users>
+ <properties>
+ <!-- DATABASE POOL: size min/max -->
+ <entry name="db.pool.min" value="1" />
+ <entry name="db.pool.max" value="20" />
+
+ <!-- LEVEL1 AND 2 CACHE: enable/disable and set the size as number of entries -->
+ <entry name="cache.level1.enabled" value="false" />
+ <entry name="cache.level1.size" value="1000" />
+ <entry name="cache.level2.enabled" value="true" />
+ <entry name="cache.level2.size" value="1000" />
+
+ <!-- PROFILER: configures the profiler as <seconds-for-snapshot>,<archive-snapshot-size>,<summary-size> -->
+ <entry name="profiler.enabled" value="true" />
+ <!-- <entry name="profiler.config" value="30,10,10" /> -->
+
+ <!-- LOG: enable/Disable logging. Levels are: finer, fine, finest, info,
+ warning -->
+ <entry name="log.console.level" value="info" />
+ <entry name="log.file.level" value="fine" />
+ </properties>
+</orient-server>
diff --git a/distributed/src/test/resources/sharded-hazelcast.xml b/distributed/src/test/resources/sharded-hazelcast.xml
new file mode 100755
index 00000000000..51c8423e700
--- /dev/null
+++ b/distributed/src/test/resources/sharded-hazelcast.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- ~ Copyright (c) 2008-2012, Hazel Bilisim Ltd. All Rights Reserved. ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License"); ~ you may
+ not use this file except in compliance with the License. ~ You may obtain
+ a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~
+ ~ Unless required by applicable law or agreed to in writing, software ~ distributed
+ under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES
+ OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for
+ the specific language governing permissions and ~ limitations under the License. -->
+
+<hazelcast
+ xsi:schemaLocation="http://www.hazelcast.com/schema/config hazelcast-config-3.0.xsd"
+ xmlns="http://www.hazelcast.com/schema/config" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <group>
+ <name>orientdb</name>
+ <password>orientdb</password>
+ </group>
+ <network>
+ <port auto-increment="true">2434</port>
+ <join>
+ <multicast enabled="true">
+ <multicast-group>224.2.2.3</multicast-group>
+ <multicast-port>2434</multicast-port>
+ </multicast>
+ <tcp-ip enabled="false">
+ <member>127.0.0.1:2435</member>
+ <interface>127.0.0.1</interface>
+ </tcp-ip>
+ </join>
+ </network>
+</hazelcast>
diff --git a/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedStorage.java b/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedStorage.java
index 605f81bf6e7..9df19ef75bc 100755
--- a/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedStorage.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/distributed/ODistributedStorage.java
@@ -146,7 +146,8 @@ public Object command(final OCommandRequestText iCommand) {
// ALREADY DISTRIBUTED
return wrapped.command(iCommand);
- if (!dManager.getDatabaseConfiguration(getName()).isReplicationActive(null))
+ final ODistributedConfiguration dbCfg = dManager.getDatabaseConfiguration(getName());
+ if (!dbCfg.isReplicationActive(null) && dbCfg.getPartitioningConfiguration(null) == null)
// DON'T REPLICATE
return wrapped.command(iCommand);
@@ -249,12 +250,13 @@ public OStorageOperationResult<ORawBuffer> readRecord(final ORecordId iRecordId,
try {
final String clusterName = getClusterNameByRID(iRecordId);
- final ODistributedConfiguration dConfig = dManager.getDatabaseConfiguration(getName());
- if (!dManager.getDatabaseConfiguration(getName()).isReplicationActive(clusterName))
+
+ final ODistributedConfiguration dbCfg = dManager.getDatabaseConfiguration(getName());
+ if (!dbCfg.isReplicationActive(clusterName) && dbCfg.getPartitioningConfiguration(clusterName) == null)
// DON'T REPLICATE
return wrapped.readRecord(iRecordId, iFetchPlan, iIgnoreCache, iCallback, loadTombstones, LOCKING_STRATEGY.DEFAULT);
- final ODistributedPartitioningStrategy strategy = dManager.getPartitioningStrategy(dConfig.getPartitionStrategy(clusterName));
+ final ODistributedPartitioningStrategy strategy = dManager.getPartitioningStrategy(dbCfg.getPartitionStrategy(clusterName));
final ODistributedPartition partition = strategy.getPartition(dManager, getName(), clusterName);
if (partition.getNodes().contains(dManager.getLocalNodeName()))
// LOCAL NODE OWNS THE DATA: GET IT LOCALLY BECAUSE IT'S FASTER
@@ -293,12 +295,13 @@ public OStorageOperationResult<ORecordVersion> updateRecord(final ORecordId iRec
try {
final String clusterName = getClusterNameByRID(iRecordId);
- if (!dManager.getDatabaseConfiguration(getName()).isReplicationActive(clusterName))
+ final ODistributedConfiguration dbCfg = dManager.getDatabaseConfiguration(getName());
+ if (!dbCfg.isReplicationActive(clusterName) && dbCfg.getPartitioningConfiguration(clusterName) == null)
// DON'T REPLICATE
return wrapped.updateRecord(iRecordId, iContent, iVersion, iRecordType, iMode, iCallback);
// LOAD PREVIOUS CONTENT TO BE USED IN CASE OF UNDO
- final OStorageOperationResult<ORawBuffer> previousContent = wrapped.readRecord(iRecordId, null, false, null, false,
+ final OStorageOperationResult<ORawBuffer> previousContent = readRecord(iRecordId, null, false, null, false,
LOCKING_STRATEGY.DEFAULT);
// REPLICATE IT
@@ -337,7 +340,8 @@ public OStorageOperationResult<Boolean> deleteRecord(final ORecordId iRecordId,
try {
final String clusterName = getClusterNameByRID(iRecordId);
- if (!dManager.getDatabaseConfiguration(getName()).isReplicationActive(clusterName))
+ final ODistributedConfiguration dbCfg = dManager.getDatabaseConfiguration(getName());
+ if (!dbCfg.isReplicationActive(clusterName) && dbCfg.getPartitioningConfiguration(clusterName) == null)
// DON'T REPLICATE
return wrapped.deleteRecord(iRecordId, iVersion, iMode, iCallback);
@@ -441,7 +445,8 @@ public void commit(final OTransaction iTx, final Runnable callback) {
wrapped.commit(iTx, callback);
else {
try {
- if (!dManager.getDatabaseConfiguration(getName()).isReplicationActive(null))
+ final ODistributedConfiguration dbCfg = dManager.getDatabaseConfiguration(getName());
+ if (!dbCfg.isReplicationActive(null) && dbCfg.getPartitioningConfiguration(null) == null)
// DON'T REPLICATE
wrapped.commit(iTx, callback);
else {
|
cce874510408305fed6e01ab5b70ab753f9ac693
|
ReactiveX-RxJava
|
Beef up UnsubscribeTester--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/subjects/RepeatSubject.java b/rxjava-core/src/main/java/rx/subjects/RepeatSubject.java
index 1d0eb6f145..7ab3a9e2c7 100644
--- a/rxjava-core/src/main/java/rx/subjects/RepeatSubject.java
+++ b/rxjava-core/src/main/java/rx/subjects/RepeatSubject.java
@@ -2,10 +2,13 @@
import org.junit.Test;
import org.mockito.Mockito;
+import rx.Observable;
import rx.Observer;
import rx.Subscription;
import rx.subscriptions.Subscriptions;
import rx.testing.UnsubscribeTester;
+import rx.util.functions.Action1;
+import rx.util.functions.Func0;
import rx.util.functions.Func1;
import java.util.ArrayList;
@@ -266,43 +269,38 @@ private void assertObservedUntilTwo(Observer<String> aObserver)
}
@Test
- public void testUnsubscribeFromOnNext() {
- RepeatSubject<Object> subject = RepeatSubject.create();
-
- UnsubscribeTester test1 = UnsubscribeTester.createOnNext(subject);
- UnsubscribeTester test2 = UnsubscribeTester.createOnNext(subject);
-
- subject.onNext("one");
-
- test1.assertPassed();
- test2.assertPassed();
- }
-
- @Test
- public void testUnsubscribeFromOnCompleted() {
- RepeatSubject<Object> subject = RepeatSubject.create();
-
- UnsubscribeTester test1 = UnsubscribeTester.createOnCompleted(subject);
- UnsubscribeTester test2 = UnsubscribeTester.createOnCompleted(subject);
-
- subject.onCompleted();
-
- test1.assertPassed();
- test2.assertPassed();
- }
-
- @Test
- public void testUnsubscribeFromOnError() {
- RepeatSubject<Object> subject = RepeatSubject.create();
-
- UnsubscribeTester test1 = UnsubscribeTester.createOnError(subject);
- UnsubscribeTester test2 = UnsubscribeTester.createOnError(subject);
-
- subject.onError(new Exception());
-
- test1.assertPassed();
- test2.assertPassed();
+ public void testUnsubscribe()
+ {
+ UnsubscribeTester.test(new Func0<RepeatSubject<Object>>()
+ {
+ @Override
+ public RepeatSubject<Object> call()
+ {
+ return RepeatSubject.create();
+ }
+ }, new Action1<RepeatSubject<Object>>()
+ {
+ @Override
+ public void call(RepeatSubject<Object> repeatSubject)
+ {
+ repeatSubject.onCompleted();
+ }
+ }, new Action1<RepeatSubject<Object>>()
+ {
+ @Override
+ public void call(RepeatSubject<Object> repeatSubject)
+ {
+ repeatSubject.onError(new Exception());
+ }
+ }, new Action1<RepeatSubject<Object>>()
+ {
+ @Override
+ public void call(RepeatSubject<Object> repeatSubject)
+ {
+ repeatSubject.onNext("one");
+ }
+ }
+ );
}
-
}
}
diff --git a/rxjava-core/src/main/java/rx/testing/UnsubscribeTester.java b/rxjava-core/src/main/java/rx/testing/UnsubscribeTester.java
index e1988c9093..08607f2734 100644
--- a/rxjava-core/src/main/java/rx/testing/UnsubscribeTester.java
+++ b/rxjava-core/src/main/java/rx/testing/UnsubscribeTester.java
@@ -3,6 +3,8 @@
import rx.Observable;
import rx.Observer;
import rx.Subscription;
+import rx.util.functions.Action1;
+import rx.util.functions.Func0;
import static org.junit.Assert.assertTrue;
@@ -12,7 +14,44 @@ public class UnsubscribeTester
public UnsubscribeTester() {}
- public static <T> UnsubscribeTester createOnNext(Observable<T> observable)
+ /**
+ * Tests the unsubscription semantics of an observable.
+ *
+ * @param provider Function that when called provides an instance of the observable being tested
+ * @param generateOnCompleted Causes an observer generated by @param provider to generate an onCompleted event. Null to not test onCompleted.
+ * @param generateOnError Causes an observer generated by @param provider to generate an onError event. Null to not test onError.
+ * @param generateOnNext Causes an observer generated by @param provider to generate an onNext event. Null to not test onNext.
+ * @param <T> The type of object passed by the Observable
+ */
+ public static <T, O extends Observable<T>> void test(Func0<O> provider, Action1<? super O> generateOnCompleted, Action1<? super O> generateOnError, Action1<? super O> generateOnNext)
+ {
+ if (generateOnCompleted != null) {
+ O observable = provider.call();
+ UnsubscribeTester tester1 = createOnCompleted(observable);
+ UnsubscribeTester tester2 = createOnCompleted(observable);
+ generateOnCompleted.call(observable);
+ tester1.assertPassed();
+ tester2.assertPassed();
+ }
+ if (generateOnError != null) {
+ O observable = provider.call();
+ UnsubscribeTester tester1 = createOnError(observable);
+ UnsubscribeTester tester2 = createOnError(observable);
+ generateOnError.call(observable);
+ tester1.assertPassed();
+ tester2.assertPassed();
+ }
+ if (generateOnNext != null) {
+ O observable = provider.call();
+ UnsubscribeTester tester1 = createOnNext(observable);
+ UnsubscribeTester tester2 = createOnNext(observable);
+ generateOnNext.call(observable);
+ tester1.assertPassed();
+ tester2.assertPassed();
+ }
+ }
+
+ private static <T> UnsubscribeTester createOnCompleted(Observable<T> observable)
{
final UnsubscribeTester test = new UnsubscribeTester();
test.setSubscription(observable.subscribe(new Observer<T>()
@@ -20,6 +59,7 @@ public static <T> UnsubscribeTester createOnNext(Observable<T> observable)
@Override
public void onCompleted()
{
+ test.doUnsubscribe();
}
@Override
@@ -30,13 +70,12 @@ public void onError(Exception e)
@Override
public void onNext(T args)
{
- test.doUnsubscribe();
}
}));
return test;
}
- public static <T> UnsubscribeTester createOnCompleted(Observable<T> observable)
+ private static <T> UnsubscribeTester createOnError(Observable<T> observable)
{
final UnsubscribeTester test = new UnsubscribeTester();
test.setSubscription(observable.subscribe(new Observer<T>()
@@ -44,12 +83,12 @@ public static <T> UnsubscribeTester createOnCompleted(Observable<T> observable)
@Override
public void onCompleted()
{
- test.doUnsubscribe();
}
@Override
public void onError(Exception e)
{
+ test.doUnsubscribe();
}
@Override
@@ -60,7 +99,7 @@ public void onNext(T args)
return test;
}
- public static <T> UnsubscribeTester createOnError(Observable<T> observable)
+ private static <T> UnsubscribeTester createOnNext(Observable<T> observable)
{
final UnsubscribeTester test = new UnsubscribeTester();
test.setSubscription(observable.subscribe(new Observer<T>()
@@ -73,12 +112,12 @@ public void onCompleted()
@Override
public void onError(Exception e)
{
- test.doUnsubscribe();
}
@Override
public void onNext(T args)
{
+ test.doUnsubscribe();
}
}));
return test;
@@ -96,7 +135,7 @@ private void doUnsubscribe()
subscription.unsubscribe();
}
- public void assertPassed()
+ private void assertPassed()
{
assertTrue("expected notification was received", subscription == null);
}
|
233ca3e2a5145f006316f899e61a02bf97c38e75
|
hadoop
|
YARN-2122. In AllocationFileLoaderService, the- reloadThread should be created in init() and started in start(). (Robert- Kanter via kasha)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1601046 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ac1dcb7193d5c..236208215a0d1 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -124,6 +124,9 @@ Release 2.5.0 - UNRELEASED
YARN-1977. Add tests on getApplicationRequest with filtering start time range. (junping_du)
+ YARN-2122. In AllocationFileLoaderService, the reloadThread should be created
+ in init() and started in start(). (Robert Kanter via kasha)
+
OPTIMIZATIONS
BUG FIXES
@@ -150,6 +153,7 @@ Release 2.5.0 - UNRELEASED
YARN-2011. Fix typo and warning in TestLeafQueue (Chen He via junping_du)
+
YARN-2042. String shouldn't be compared using == in
QueuePlacementRule#NestedUserQueue#getQueueForApp (Chen He via Sandy Ryza)
diff --git a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
index 79ce7460c2137..2da958a08bf5d 100644
--- a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
+++ b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
@@ -142,6 +142,11 @@
<Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.LogAggregationService" />
<Bug pattern="IS2_INCONSISTENT_SYNC" />
</Match>
+ <Match>
+ <Class name="org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.AllocationFileLoaderService" />
+ <Field name="allocFile" />
+ <Bug pattern="IS2_INCONSISTENT_SYNC" />
+ </Match>
<!-- Inconsistent sync warning - minimumAllocation is only initialized once and never changed -->
<Match>
<Class name="org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler" />
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
index 3a962a8ce52a8..064bdfc817f61 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationFileLoaderService.java
@@ -98,55 +98,59 @@ public AllocationFileLoaderService(Clock clock) {
}
@Override
- public void init(Configuration conf) {
+ public void serviceInit(Configuration conf) throws Exception {
this.allocFile = getAllocationFile(conf);
- super.init(conf);
- }
-
- @Override
- public void start() {
- if (allocFile == null) {
- return;
- }
- reloadThread = new Thread() {
- public void run() {
- while (running) {
- long time = clock.getTime();
- long lastModified = allocFile.lastModified();
- if (lastModified > lastSuccessfulReload &&
- time > lastModified + ALLOC_RELOAD_WAIT_MS) {
- try {
- reloadAllocations();
- } catch (Exception ex) {
+ if (allocFile != null) {
+ reloadThread = new Thread() {
+ @Override
+ public void run() {
+ while (running) {
+ long time = clock.getTime();
+ long lastModified = allocFile.lastModified();
+ if (lastModified > lastSuccessfulReload &&
+ time > lastModified + ALLOC_RELOAD_WAIT_MS) {
+ try {
+ reloadAllocations();
+ } catch (Exception ex) {
+ if (!lastReloadAttemptFailed) {
+ LOG.error("Failed to reload fair scheduler config file - " +
+ "will use existing allocations.", ex);
+ }
+ lastReloadAttemptFailed = true;
+ }
+ } else if (lastModified == 0l) {
if (!lastReloadAttemptFailed) {
- LOG.error("Failed to reload fair scheduler config file - " +
- "will use existing allocations.", ex);
+ LOG.warn("Failed to reload fair scheduler config file because" +
+ " last modified returned 0. File exists: "
+ + allocFile.exists());
}
lastReloadAttemptFailed = true;
}
- } else if (lastModified == 0l) {
- if (!lastReloadAttemptFailed) {
- LOG.warn("Failed to reload fair scheduler config file because" +
- " last modified returned 0. File exists: " + allocFile.exists());
+ try {
+ Thread.sleep(reloadIntervalMs);
+ } catch (InterruptedException ex) {
+ LOG.info(
+ "Interrupted while waiting to reload alloc configuration");
}
- lastReloadAttemptFailed = true;
- }
- try {
- Thread.sleep(reloadIntervalMs);
- } catch (InterruptedException ex) {
- LOG.info("Interrupted while waiting to reload alloc configuration");
}
}
- }
- };
- reloadThread.setName("AllocationFileReloader");
- reloadThread.setDaemon(true);
- reloadThread.start();
- super.start();
+ };
+ reloadThread.setName("AllocationFileReloader");
+ reloadThread.setDaemon(true);
+ }
+ super.serviceInit(conf);
+ }
+
+ @Override
+ public void serviceStart() throws Exception {
+ if (reloadThread != null) {
+ reloadThread.start();
+ }
+ super.serviceStart();
}
@Override
- public void stop() {
+ public void serviceStop() throws Exception {
running = false;
if (reloadThread != null) {
reloadThread.interrupt();
@@ -156,7 +160,7 @@ public void stop() {
LOG.warn("reloadThread fails to join.");
}
}
- super.stop();
+ super.serviceStop();
}
/**
|
edc59a680a0c147a19b0f5f216db8553b6e7dc57
|
drools
|
- fixed an issue where a comment in a work item- definition file results in a null work item definition object, filtering out- null now--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@26933 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/RuleBaseConfiguration.java b/drools-core/src/main/java/org/drools/RuleBaseConfiguration.java
index 3b490a1b18d..bcbf3aa832e 100755
--- a/drools-core/src/main/java/org/drools/RuleBaseConfiguration.java
+++ b/drools-core/src/main/java/org/drools/RuleBaseConfiguration.java
@@ -812,31 +812,33 @@ private void loadWorkItems(String location) {
List<Map<String, Object>> workDefinitionsMap = (List<Map<String, Object>>) MVEL.eval( content,
new HashMap() );
for ( Map<String, Object> workDefinitionMap : workDefinitionsMap ) {
- WorkDefinitionExtensionImpl workDefinition = new WorkDefinitionExtensionImpl();
- workDefinition.setName( (String) workDefinitionMap.get( "name" ) );
- workDefinition.setDisplayName( (String) workDefinitionMap.get( "displayName" ) );
- workDefinition.setIcon( (String) workDefinitionMap.get( "icon" ) );
- workDefinition.setCustomEditor( (String) workDefinitionMap.get( "customEditor" ) );
- Set<ParameterDefinition> parameters = new HashSet<ParameterDefinition>();
- Map<String, DataType> parameterMap = (Map<String, DataType>) workDefinitionMap.get( "parameters" );
- if ( parameterMap != null ) {
- for ( Map.Entry<String, DataType> entry : parameterMap.entrySet() ) {
- parameters.add( new ParameterDefinitionImpl( entry.getKey(),
- entry.getValue() ) );
- }
- }
- workDefinition.setParameters( parameters );
- Set<ParameterDefinition> results = new HashSet<ParameterDefinition>();
- Map<String, DataType> resultMap = (Map<String, DataType>) workDefinitionMap.get( "results" );
- if ( resultMap != null ) {
- for ( Map.Entry<String, DataType> entry : resultMap.entrySet() ) {
- results.add( new ParameterDefinitionImpl( entry.getKey(),
- entry.getValue() ) );
- }
- }
- workDefinition.setResults( results );
- this.workDefinitions.put( workDefinition.getName(),
- workDefinition );
+ if (workDefinitionMap != null) {
+ WorkDefinitionExtensionImpl workDefinition = new WorkDefinitionExtensionImpl();
+ workDefinition.setName( (String) workDefinitionMap.get( "name" ) );
+ workDefinition.setDisplayName( (String) workDefinitionMap.get( "displayName" ) );
+ workDefinition.setIcon( (String) workDefinitionMap.get( "icon" ) );
+ workDefinition.setCustomEditor( (String) workDefinitionMap.get( "customEditor" ) );
+ Set<ParameterDefinition> parameters = new HashSet<ParameterDefinition>();
+ Map<String, DataType> parameterMap = (Map<String, DataType>) workDefinitionMap.get( "parameters" );
+ if ( parameterMap != null ) {
+ for ( Map.Entry<String, DataType> entry : parameterMap.entrySet() ) {
+ parameters.add( new ParameterDefinitionImpl( entry.getKey(),
+ entry.getValue() ) );
+ }
+ }
+ workDefinition.setParameters( parameters );
+ Set<ParameterDefinition> results = new HashSet<ParameterDefinition>();
+ Map<String, DataType> resultMap = (Map<String, DataType>) workDefinitionMap.get( "results" );
+ if ( resultMap != null ) {
+ for ( Map.Entry<String, DataType> entry : resultMap.entrySet() ) {
+ results.add( new ParameterDefinitionImpl( entry.getKey(),
+ entry.getValue() ) );
+ }
+ }
+ workDefinition.setResults( results );
+ this.workDefinitions.put( workDefinition.getName(),
+ workDefinition );
+ }
}
} catch ( Throwable t ) {
System.err.println( "Error occured while loading work definitions " + location );
|
72a3aef6b6df020c0198689a88690a167653fcec
|
orientdb
|
removed nasty warnings in compilation--
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/FullTextIndexerTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/FullTextIndexerTest.java
index 317bdf1aedd..1b447e5b352 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/FullTextIndexerTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/FullTextIndexerTest.java
@@ -29,7 +29,7 @@ public static void main(String[] iArgs) throws InstantiationException, IllegalAc
document.field("surname", "Miner");
document
.field("description",
- "Jay Glenn Miner (May 31, 1932 - June 20, 1994), was a famous integrated circuit designer, known primarily for his "
+ "Jay Glenn Miner (May 31, 1932 to June 20, 1994), was a famous integrated circuit designer, known primarily for his "
+ "work in multimedia chips and as the 'father of the Amiga'[1]. He received a BS in EECS from "
+ "UC Berkeley in 1959. Miner started in the electronics industry with a number of designs in the "
+ "medical world, including a remote-control pacemaker. He moved to Atari in the late 1970s. One of "
|
f372f7c109b550c6b20b8196713aa313ad6c249f
|
elasticsearch
|
Cut over StringScriptDataComparator to use- BytesRef instead of Strings--Closes -2920-
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/StringScriptDataComparator.java b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/StringScriptDataComparator.java
index 393c5ab5a9581..8ed32896e9aad 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/StringScriptDataComparator.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/StringScriptDataComparator.java
@@ -23,6 +23,7 @@
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;
+import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.script.SearchScript;
@@ -31,7 +32,7 @@
/**
*
*/
-public class StringScriptDataComparator extends FieldComparator<String> {
+public class StringScriptDataComparator extends FieldComparator<BytesRef> {
public static IndexFieldData.XFieldComparatorSource comparatorSource(SearchScript script) {
return new InnerSource(script);
@@ -58,18 +59,23 @@ public SortField.Type reducedType() {
private final SearchScript script;
- private String[] values;
+ private BytesRef[] values;
- private String bottom;
+ private BytesRef bottom;
+
+ private BytesRef spare = new BytesRef();
+
+ private int spareDoc = -1;
public StringScriptDataComparator(int numHits, SearchScript script) {
this.script = script;
- values = new String[numHits];
+ values = new BytesRef[numHits];
}
@Override
- public FieldComparator<String> setNextReader(AtomicReaderContext context) throws IOException {
+ public FieldComparator<BytesRef> setNextReader(AtomicReaderContext context) throws IOException {
script.setNextReader(context);
+ spareDoc = -1;
return this;
}
@@ -80,8 +86,8 @@ public void setScorer(Scorer scorer) {
@Override
public int compare(int slot1, int slot2) {
- final String val1 = values[slot1];
- final String val2 = values[slot2];
+ final BytesRef val1 = values[slot1];
+ final BytesRef val2 = values[slot2];
if (val1 == null) {
if (val2 == null) {
return 0;
@@ -96,30 +102,38 @@ public int compare(int slot1, int slot2) {
@Override
public int compareBottom(int doc) {
- script.setNextDocId(doc);
- final String val2 = script.run().toString();
+
if (bottom == null) {
- if (val2 == null) {
- return 0;
- }
return -1;
- } else if (val2 == null) {
- return 1;
}
- return bottom.compareTo(val2);
+ return bottom.compareTo(spare);
}
@Override
- public int compareDocToValue(int doc, String val2) throws IOException {
+ public int compareDocToValue(int doc, BytesRef val2) throws IOException {
script.setNextDocId(doc);
- String val1 = script.run().toString();
- return val1.compareTo(val2);
+ setSpare(doc);
+ return spare.compareTo(val2);
+ }
+
+ private void setSpare(int doc) {
+ if (spareDoc == doc) {
+ return;
+ }
+
+ script.setNextDocId(doc);
+ spare.copyChars(script.run().toString());
+ spareDoc = doc;
}
@Override
public void copy(int slot, int doc) {
- script.setNextDocId(doc);
- values[slot] = script.run().toString();
+ setSpare(doc);
+ if (values[slot] == null) {
+ values[slot] = BytesRef.deepCopyOf(spare);
+ } else {
+ values[slot].copyBytes(spare);
+ }
}
@Override
@@ -128,7 +142,7 @@ public void setBottom(final int bottom) {
}
@Override
- public String value(int slot) {
+ public BytesRef value(int slot) {
return values[slot];
}
}
diff --git a/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java b/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java
index a5211a5ba7483..9c6ea5fad251b 100644
--- a/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java
+++ b/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java
@@ -431,6 +431,40 @@ private void testSimpleSorts(int numberOfShards) throws Exception {
assertThat(searchResponse.toString(), not(containsString("error")));
}
+ @Test
+ public void testSortScript() throws IOException {
+ try {
+ client.admin().indices().prepareDelete("test").execute().actionGet();
+ } catch (Exception e) {
+ // ignore
+ }
+
+ String mapping = jsonBuilder().startObject().startObject("profile").field("dynamic", "strict")
+ .startObject("properties")
+ .startObject("id").field("type", "integer").field("index", "not_analyzed").field("store", true).endObject()
+ .startObject("groups_code").startObject("properties").field("type", "integer").field("index", "not_analyzed").endObject().endObject()
+ .startObject("date").field("type", "date").field("index", "not_analyzed").field("format", "date_time_no_millis").endObject()
+ .endObject().endObject().endObject().string();
+ client.admin().indices().prepareCreate("test")
+ .setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0)).addMapping("test", mapping);
+
+ client.prepareIndex("test", "test", "1").setSource(jsonBuilder().startObject()
+ .startArray("groups_code").startObject().field("id", 47642).field("date", "2010-08-12T07:54:55Z").endObject().endArray()
+ .endObject()).execute().actionGet();
+ client.prepareIndex("test", "test", "2").setSource(jsonBuilder().startObject()
+ .startArray("groups_code").startObject().field("id", 47642).field("date", "2010-05-04T12:10:54Z").endObject().endArray()
+ .endObject()).execute().actionGet();
+ client.admin().indices().prepareRefresh("test").execute().actionGet();
+
+ SearchResponse searchResponse = client.prepareSearch()
+ .setQuery(matchAllQuery())
+ .addSort(SortBuilders.scriptSort("\u0027\u0027", "string")).setSize(10)
+ .execute().actionGet();
+
+ assertThat("Failures " + Arrays.toString(searchResponse.getShardFailures()), searchResponse.getShardFailures().length, equalTo(0));
+
+ }
+
@Test
public void testSortMinValueScript() throws IOException {
try {
|
4c0368c587a32fe1e828793e66e41b017fe41605
|
orientdb
|
Fixed issue about out*() functions--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/graphdb/src/main/java/com/orientechnologies/orient/graph/sql/functions/OSQLFunctionLabel.java b/graphdb/src/main/java/com/orientechnologies/orient/graph/sql/functions/OSQLFunctionLabel.java
index 183ec25efe2..046cb49742f 100644
--- a/graphdb/src/main/java/com/orientechnologies/orient/graph/sql/functions/OSQLFunctionLabel.java
+++ b/graphdb/src/main/java/com/orientechnologies/orient/graph/sql/functions/OSQLFunctionLabel.java
@@ -44,13 +44,13 @@ public Object execute(final OIdentifiable iCurrentRecord, final Object iCurrentR
OCommandContext iContext) {
final OrientBaseGraph graph = OGraphCommandExecutorSQLFactory.getGraph();
- if (iCurrentRecord == null) {
+ if (iParameters != null && iParameters.length > 0 && iParameters[0] != null) {
return OSQLEngine.foreachRecord(new OCallable<Object, OIdentifiable>() {
@Override
public Object call(final OIdentifiable iArgument) {
return getLabel(graph, iArgument);
}
- }, iCurrentRecord, iContext);
+ }, iParameters[0], iContext);
} else
return getLabel(graph, iCurrentRecord);
}
diff --git a/graphdb/src/main/java/com/orientechnologies/orient/graph/sql/functions/OSQLFunctionMove.java b/graphdb/src/main/java/com/orientechnologies/orient/graph/sql/functions/OSQLFunctionMove.java
index a1f3ee7452c..c900c4f6077 100644
--- a/graphdb/src/main/java/com/orientechnologies/orient/graph/sql/functions/OSQLFunctionMove.java
+++ b/graphdb/src/main/java/com/orientechnologies/orient/graph/sql/functions/OSQLFunctionMove.java
@@ -69,13 +69,13 @@ public Object call(final Object iArgument) {
else
labels = null;
- if (iCurrentResult != null)
+ if (iParameters != null && iParameters.length > 0 && iParameters[0] != null)
return OSQLEngine.foreachRecord(new OCallable<Object, OIdentifiable>() {
@Override
public Object call(final OIdentifiable iArgument) {
return move(graph, iArgument, labels);
}
- }, iCurrentRecord, iContext);
+ }, iParameters[0], iContext);
else
return move(graph, iCurrentRecord.getRecord(), labels);
}
|
fe6e166c17990070a64c0b15d91b283a18c9dec5
|
intellij-community
|
XML rename dialog doesn't accept character '.'- or '-' (IDEADEV-35547)--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/xml/openapi/src/com/intellij/patterns/XmlTagPattern.java b/xml/openapi/src/com/intellij/patterns/XmlTagPattern.java
index 0ff218992457e..636c2b840b3d4 100644
--- a/xml/openapi/src/com/intellij/patterns/XmlTagPattern.java
+++ b/xml/openapi/src/com/intellij/patterns/XmlTagPattern.java
@@ -2,6 +2,7 @@
import com.intellij.openapi.util.Comparing;
import com.intellij.psi.xml.XmlTag;
+import com.intellij.psi.meta.PsiMetaData;
import com.intellij.util.ProcessingContext;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
@@ -55,6 +56,14 @@ public boolean accepts(@NotNull final XmlTag xmlTag, final ProcessingContext con
});
}
+ public Self withDescriptor(@NotNull final ElementPattern<? extends PsiMetaData> metaDataPattern) {
+ return with(new PatternCondition<XmlTag>("withDescriptor") {
+ public boolean accepts(@NotNull final XmlTag xmlTag, final ProcessingContext context) {
+ return metaDataPattern.accepts(xmlTag.getDescriptor());
+ }
+ });
+ }
+
public Self isFirstSubtag(@NotNull final ElementPattern pattern) {
return with(new PatternCondition<XmlTag>("isFirstSubtag") {
public boolean accepts(@NotNull final XmlTag xmlTag, final ProcessingContext context) {
|
b25462ddf9dcbf0bdc940e7ab1c796520d694093
|
spring-framework
|
SPR-7107 - RestTemplate/UriTemplate/UriUtils- improperly encoding UTF-8--
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.web/src/main/java/org/springframework/web/util/UriUtils.java b/org.springframework.web/src/main/java/org/springframework/web/util/UriUtils.java
index 4b023395e3da..67da06b8064b 100644
--- a/org.springframework.web/src/main/java/org/springframework/web/util/UriUtils.java
+++ b/org.springframework.web/src/main/java/org/springframework/web/util/UriUtils.java
@@ -424,22 +424,33 @@ private static String encode(String source, String encoding, BitSet notEncoded)
throws UnsupportedEncodingException {
Assert.notNull(source, "'source' must not be null");
Assert.hasLength(encoding, "'encoding' must not be empty");
- ByteArrayOutputStream bos = new ByteArrayOutputStream(source.length() * 2);
- for (int i = 0; i < source.length(); i++) {
- int ch = source.charAt(i);
- if (notEncoded.get(ch)) {
- bos.write(ch);
+ byte[] bytes = encode(source.getBytes(encoding), notEncoded);
+ return new String(bytes, "US-ASCII");
+ }
+
+ private static byte[] encode(byte[] source, BitSet notEncoded) {
+ Assert.notNull(source, "'source' must not be null");
+
+ ByteArrayOutputStream bos = new ByteArrayOutputStream(source.length * 2);
+
+ for (int i = 0; i < source.length; i++) {
+ int b = source[i];
+ if (b < 0) {
+ b += 256;
+ }
+ if (notEncoded.get(b)) {
+ bos.write(b);
}
else {
bos.write('%');
- char hex1 = Character.toUpperCase(Character.forDigit((ch >> 4) & 0xF, 16));
- char hex2 = Character.toUpperCase(Character.forDigit(ch & 0xF, 16));
+ char hex1 = Character.toUpperCase(Character.forDigit((b >> 4) & 0xF, 16));
+ char hex2 = Character.toUpperCase(Character.forDigit(b & 0xF, 16));
bos.write(hex1);
bos.write(hex2);
}
}
- return new String(bos.toByteArray(), encoding);
+ return bos.toByteArray();
}
/**
diff --git a/org.springframework.web/src/test/java/org/springframework/web/client/RestTemplateIntegrationTests.java b/org.springframework.web/src/test/java/org/springframework/web/client/RestTemplateIntegrationTests.java
index 78fa26c6f042..5f795306a304 100644
--- a/org.springframework.web/src/test/java/org/springframework/web/client/RestTemplateIntegrationTests.java
+++ b/org.springframework.web/src/test/java/org/springframework/web/client/RestTemplateIntegrationTests.java
@@ -165,7 +165,7 @@ public void optionsForAllow() throws URISyntaxException {
@Test
public void uri() throws InterruptedException, URISyntaxException {
String result = template.getForObject(URI + "/uri/{query}", String.class, "Z\u00fcrich");
- assertEquals("Invalid request URI", "/uri/Z%FCrich", result);
+ assertEquals("Invalid request URI", "/uri/Z%C3%BCrich", result);
result = template.getForObject(URI + "/uri/query={query}", String.class, "foo@bar");
assertEquals("Invalid request URI", "/uri/query=foo@bar", result);
diff --git a/org.springframework.web/src/test/java/org/springframework/web/util/UriTemplateTests.java b/org.springframework.web/src/test/java/org/springframework/web/util/UriTemplateTests.java
index 58bafcd6fbf8..7c7c07833a2f 100644
--- a/org.springframework.web/src/test/java/org/springframework/web/util/UriTemplateTests.java
+++ b/org.springframework.web/src/test/java/org/springframework/web/util/UriTemplateTests.java
@@ -87,9 +87,9 @@ public void expandMapUnboundVariables() throws Exception {
@Test
public void expandEncoded() throws Exception {
- UriTemplate template = new UriTemplate("http://example.com//hotel list/{hotel}");
+ UriTemplate template = new UriTemplate("http://example.com/hotel list/{hotel}");
URI result = template.expand("Z\u00fcrich");
- assertEquals("Invalid expanded template", new URI("http://example.com//hotel%20list/Z%FCrich"), result);
+ assertEquals("Invalid expanded template", new URI("http://example.com/hotel%20list/Z%C3%BCrich"), result);
}
@Test
diff --git a/org.springframework.web/src/test/java/org/springframework/web/util/UriUtilsTest.java b/org.springframework.web/src/test/java/org/springframework/web/util/UriUtilsTest.java
index a3713d1efa24..36fdae0a3961 100644
--- a/org.springframework.web/src/test/java/org/springframework/web/util/UriUtilsTest.java
+++ b/org.springframework.web/src/test/java/org/springframework/web/util/UriUtilsTest.java
@@ -53,7 +53,7 @@ public void encodePort() throws UnsupportedEncodingException {
public void encodePath() throws UnsupportedEncodingException {
assertEquals("Invalid encoded result", "/foo/bar", UriUtils.encodePath("/foo/bar", ENC));
assertEquals("Invalid encoded result", "/foo%20bar", UriUtils.encodePath("/foo bar", ENC));
- assertEquals("Invalid encoded result", "/Z%FCrich", UriUtils.encodePath("/Z\u00fcrich", ENC));
+ assertEquals("Invalid encoded result", "/Z%C3%BCrich", UriUtils.encodePath("/Z\u00fcrich", ENC));
}
@Test
@@ -67,6 +67,7 @@ public void encodeQuery() throws UnsupportedEncodingException {
assertEquals("Invalid encoded result", "foobar", UriUtils.encodeQuery("foobar", ENC));
assertEquals("Invalid encoded result", "foo%20bar", UriUtils.encodeQuery("foo bar", ENC));
assertEquals("Invalid encoded result", "foobar/+", UriUtils.encodeQuery("foobar/+", ENC));
+ assertEquals("Invalid encoded result", "T%C5%8Dky%C5%8D", UriUtils.encodeQuery("T\u014dky\u014d", ENC));
}
@Test
@@ -101,8 +102,8 @@ public void encodeUri() throws UnsupportedEncodingException {
UriUtils.encodeUri("http://www.ietf.org/rfc/rfc3986.txt", ENC));
assertEquals("Invalid encoded URI", "https://www.ietf.org/rfc/rfc3986.txt",
UriUtils.encodeUri("https://www.ietf.org/rfc/rfc3986.txt", ENC));
- assertEquals("Invalid encoded URI", "http://www.google.com/?q=z%FCrich",
- UriUtils.encodeUri("http://www.google.com/?q=z\u00fcrich", ENC));
+ assertEquals("Invalid encoded URI", "http://www.google.com/?q=Z%C3%BCrich",
+ UriUtils.encodeUri("http://www.google.com/?q=Z\u00fcrich", ENC));
assertEquals("Invalid encoded URI",
"http://arjen:[email protected]:80/javase/6/docs/api/java/util/BitSet.html?foo=bar#and(java.util.BitSet)",
UriUtils.encodeUri(
@@ -130,8 +131,8 @@ public void encodeHttpUrl() throws UnsupportedEncodingException {
UriUtils.encodeHttpUrl("http://www.ietf.org/rfc/rfc3986.txt", ENC));
assertEquals("Invalid encoded URI", "https://www.ietf.org/rfc/rfc3986.txt",
UriUtils.encodeHttpUrl("https://www.ietf.org/rfc/rfc3986.txt", ENC));
- assertEquals("Invalid encoded HTTP URL", "http://www.google.com/?q=z%FCrich",
- UriUtils.encodeHttpUrl("http://www.google.com/?q=z\u00fcrich", ENC));
+ assertEquals("Invalid encoded HTTP URL", "http://www.google.com/?q=Z%C3%BCrich",
+ UriUtils.encodeHttpUrl("http://www.google.com/?q=Z\u00fcrich", ENC));
assertEquals("Invalid encoded HTTP URL",
"http://arjen:[email protected]:80/javase/6/docs/api/java/util/BitSet.html?foo=bar",
UriUtils.encodeHttpUrl(
|
f06af44932d31d1a787425f853f142c98d74f143
|
hbase
|
HBASE-9095. AssignmentManager's handleRegion- should respect the single threaded nature of the processing--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1510799 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
index df8c6090f30b..dc2c0e43ed01 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
@@ -159,6 +159,17 @@ public class AssignmentManager extends ZooKeeperListener {
private final ExecutorService executorService;
+ // For unit tests, keep track of calls to ClosedRegionHandler
+ private Map<HRegionInfo, AtomicBoolean> closedRegionHandlerCalled =
+ new HashMap<HRegionInfo, AtomicBoolean>();
+
+ // For unit tests, keep track of calls to OpenedRegionHandler
+ private Map<HRegionInfo, AtomicBoolean> openedRegionHandlerCalled =
+ new HashMap<HRegionInfo, AtomicBoolean>();
+
+ // For unit tests, keep track of calls to SplitRegionHandler
+ private AtomicBoolean splitRegionHandlerCalled = new AtomicBoolean(false);
+
//Thread pool executor service for timeout monitor
private java.util.concurrent.ExecutorService threadPoolExecutorService;
@@ -836,8 +847,8 @@ private void handleRegion(final RegionTransition rt, int expectedVersion) {
break;
}
// Run handler to do the rest of the SPLIT handling.
- this.executorService.submit(new SplitRegionHandler(server, this,
- regionState.getRegion(), sn, daughters));
+ new SplitRegionHandler(server, this, regionState.getRegion(), sn, daughters).process();
+ splitRegionHandlerCalled.set(true);
break;
case RS_ZK_REGION_MERGING:
@@ -872,8 +883,7 @@ private void handleRegion(final RegionTransition rt, int expectedVersion) {
+ merge_a + ", rs_b=" + merge_b);
}
// Run handler to do the rest of the MERGED handling.
- this.executorService.submit(new MergedRegionHandler(
- server, this, sn, mergeRegions));
+ new MergedRegionHandler(server, this, sn, mergeRegions).process();
break;
case M_ZK_REGION_CLOSING:
@@ -907,8 +917,8 @@ private void handleRegion(final RegionTransition rt, int expectedVersion) {
regionState = regionStates.updateRegionState(rt, RegionState.State.CLOSED);
if (regionState != null) {
removeClosedRegion(regionState.getRegion());
- this.executorService.submit(new ClosedRegionHandler(server,
- this, regionState.getRegion()));
+ new ClosedRegionHandler(server, this, regionState.getRegion()).process();
+ closedRegionHandlerCalled.put(regionState.getRegion(), new AtomicBoolean(true));
}
break;
@@ -941,8 +951,7 @@ private void handleRegion(final RegionTransition rt, int expectedVersion) {
// When there are more than one region server a new RS is selected as the
// destination and the same is updated in the regionplan. (HBASE-5546)
getRegionPlan(regionState.getRegion(), sn, true);
- this.executorService.submit(new ClosedRegionHandler(server,
- this, regionState.getRegion()));
+ new ClosedRegionHandler(server, this, regionState.getRegion()).process();
}
}
break;
@@ -980,8 +989,9 @@ private void handleRegion(final RegionTransition rt, int expectedVersion) {
regionState = regionStates.updateRegionState(rt, RegionState.State.OPEN);
if (regionState != null) {
failedOpenTracker.remove(encodedName); // reset the count, if any
- this.executorService.submit(new OpenedRegionHandler(
- server, this, regionState.getRegion(), sn, expectedVersion));
+ new OpenedRegionHandler(
+ server, this, regionState.getRegion(), sn, expectedVersion).process();
+ openedRegionHandlerCalled.put(regionState.getRegion(), new AtomicBoolean(true));
}
break;
@@ -993,6 +1003,32 @@ private void handleRegion(final RegionTransition rt, int expectedVersion) {
}
}
+ //For unit tests only
+ boolean wasClosedHandlerCalled(HRegionInfo hri) {
+ AtomicBoolean b = closedRegionHandlerCalled.get(hri);
+ //compareAndSet to be sure that unit tests don't see stale values. Means,
+ //we will return true exactly once unless the handler code resets to true
+ //this value.
+ return b == null ? false : b.compareAndSet(true, false);
+ }
+
+ //For unit tests only
+ boolean wasOpenedHandlerCalled(HRegionInfo hri) {
+ AtomicBoolean b = openedRegionHandlerCalled.get(hri);
+ //compareAndSet to be sure that unit tests don't see stale values. Means,
+ //we will return true exactly once unless the handler code resets to true
+ //this value.
+ return b == null ? false : b.compareAndSet(true, false);
+ }
+
+ //For unit tests only
+ boolean wasSplitHandlerCalled() {
+ //compareAndSet to be sure that unit tests don't see stale values. Means,
+ //we will return true exactly once unless the handler code resets to true
+ //this value.
+ return splitRegionHandlerCalled.compareAndSet(true, false);
+ }
+
/**
* @return Returns true if this RegionState is splittable; i.e. the
* RegionState is currently in splitting state or pending_close or
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java
index eb4f2c915215..04c5e5552982 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java
@@ -24,16 +24,11 @@
import org.apache.hadoop.hbase.catalog.MetaReader;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.executor.EventHandler;
-import org.apache.hadoop.hbase.executor.EventHandler.EventHandlerListener;
-import org.apache.hadoop.hbase.executor.EventType;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import java.io.IOException;
import java.util.List;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -86,35 +81,27 @@ public void testMasterOpsWhileSplitting() throws Exception {
tableRegions.get(0).getFirst().getEndKey());
// Now trigger a split and stop when the split is in progress
- CountDownLatch split = new CountDownLatch(1);
- CountDownLatch proceed = new CountDownLatch(1);
- RegionSplitListener list = new RegionSplitListener(split, proceed);
- cluster.getMaster().executorService.
- registerListener(EventType.RS_ZK_REGION_SPLIT, list);
-
LOG.info("Splitting table");
TEST_UTIL.getHBaseAdmin().split(TABLENAME);
LOG.info("Waiting for split result to be about to open");
- split.await(60, TimeUnit.SECONDS);
- try {
- LOG.info("Making sure we can call getTableRegions while opening");
- tableRegions = MetaReader.getTableRegionsAndLocations(m.getCatalogTracker(),
+ while (!m.assignmentManager.wasSplitHandlerCalled()) {
+ Thread.sleep(100);
+ }
+ LOG.info("Making sure we can call getTableRegions while opening");
+ tableRegions = MetaReader.getTableRegionsAndLocations(m.getCatalogTracker(),
TABLENAME, false);
- LOG.info("Regions: " + Joiner.on(',').join(tableRegions));
- // We have three regions because one is split-in-progress
- assertEquals(3, tableRegions.size());
- LOG.info("Making sure we can call getTableRegionClosest while opening");
- Pair<HRegionInfo, ServerName> pair =
+ LOG.info("Regions: " + Joiner.on(',').join(tableRegions));
+ // We have three regions because one is split-in-progress
+ assertEquals(3, tableRegions.size());
+ LOG.info("Making sure we can call getTableRegionClosest while opening");
+ Pair<HRegionInfo, ServerName> pair =
m.getTableRegionForRow(TABLENAME, Bytes.toBytes("cde"));
- LOG.info("Result is: " + pair);
- Pair<HRegionInfo, ServerName> tableRegionFromName =
+ LOG.info("Result is: " + pair);
+ Pair<HRegionInfo, ServerName> tableRegionFromName =
MetaReader.getRegion(m.getCatalogTracker(),
pair.getFirst().getRegionName());
- assertEquals(tableRegionFromName.getFirst(), pair.getFirst());
- } finally {
- proceed.countDown();
- }
+ assertEquals(tableRegionFromName.getFirst(), pair.getFirst());
}
@Test
@@ -175,33 +162,5 @@ public void testMoveThrowsPleaseHoldException() throws IOException {
TEST_UTIL.deleteTable(tableName);
}
}
-
- static class RegionSplitListener implements EventHandlerListener {
- CountDownLatch split, proceed;
-
- public RegionSplitListener(CountDownLatch split, CountDownLatch proceed) {
- this.split = split;
- this.proceed = proceed;
- }
-
- @Override
- public void afterProcess(EventHandler event) {
- if (event.getEventType() != EventType.RS_ZK_REGION_SPLIT) {
- return;
- }
- try {
- split.countDown();
- proceed.await(60, TimeUnit.SECONDS);
- } catch (InterruptedException ie) {
- throw new RuntimeException(ie);
- }
- return;
- }
-
- @Override
- public void beforeProcess(EventHandler event) {
- }
- }
-
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestZKBasedOpenCloseRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestZKBasedOpenCloseRegion.java
index 6902b2543c5f..d7532165310a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestZKBasedOpenCloseRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestZKBasedOpenCloseRegion.java
@@ -26,7 +26,6 @@
import java.io.IOException;
import java.util.Collection;
-import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -43,10 +42,6 @@
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.executor.EventHandler;
-import org.apache.hadoop.hbase.executor.EventHandler.EventHandlerListener;
-import org.apache.hadoop.hbase.executor.EventType;
-import org.apache.hadoop.hbase.master.handler.TotesHRegionInfo;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.util.Bytes;
@@ -116,29 +111,14 @@ public class TestZKBasedOpenCloseRegion {
HRegionInfo hri = getNonMetaRegion(ProtobufUtil.getOnlineRegions(regionServer));
LOG.debug("Asking RS to close region " + hri.getRegionNameAsString());
- AtomicBoolean closeEventProcessed = new AtomicBoolean(false);
- AtomicBoolean reopenEventProcessed = new AtomicBoolean(false);
-
- EventHandlerListener closeListener =
- new ReopenEventListener(hri.getRegionNameAsString(),
- closeEventProcessed, EventType.RS_ZK_REGION_CLOSED);
- cluster.getMaster().executorService.
- registerListener(EventType.RS_ZK_REGION_CLOSED, closeListener);
-
- EventHandlerListener openListener =
- new ReopenEventListener(hri.getRegionNameAsString(),
- reopenEventProcessed, EventType.RS_ZK_REGION_OPENED);
- cluster.getMaster().executorService.
- registerListener(EventType.RS_ZK_REGION_OPENED, openListener);
-
LOG.info("Unassign " + hri.getRegionNameAsString());
cluster.getMaster().assignmentManager.unassign(hri);
- while (!closeEventProcessed.get()) {
+ while (!cluster.getMaster().assignmentManager.wasClosedHandlerCalled(hri)) {
Threads.sleep(100);
}
- while (!reopenEventProcessed.get()) {
+ while (!cluster.getMaster().assignmentManager.wasOpenedHandlerCalled(hri)) {
Threads.sleep(100);
}
@@ -157,83 +137,6 @@ private HRegionInfo getNonMetaRegion(final Collection<HRegionInfo> regions) {
return hri;
}
- public static class ReopenEventListener implements EventHandlerListener {
- private static final Log LOG = LogFactory.getLog(ReopenEventListener.class);
- String regionName;
- AtomicBoolean eventProcessed;
- EventType eventType;
-
- public ReopenEventListener(String regionName,
- AtomicBoolean eventProcessed, EventType eventType) {
- this.regionName = regionName;
- this.eventProcessed = eventProcessed;
- this.eventType = eventType;
- }
-
- @Override
- public void beforeProcess(EventHandler event) {
- if(event.getEventType() == eventType) {
- LOG.info("Received " + eventType + " and beginning to process it");
- }
- }
-
- @Override
- public void afterProcess(EventHandler event) {
- LOG.info("afterProcess(" + event + ")");
- if(event.getEventType() == eventType) {
- LOG.info("Finished processing " + eventType);
- String regionName = "";
- if(eventType == EventType.RS_ZK_REGION_OPENED) {
- TotesHRegionInfo hriCarrier = (TotesHRegionInfo)event;
- regionName = hriCarrier.getHRegionInfo().getRegionNameAsString();
- } else if(eventType == EventType.RS_ZK_REGION_CLOSED) {
- TotesHRegionInfo hriCarrier = (TotesHRegionInfo)event;
- regionName = hriCarrier.getHRegionInfo().getRegionNameAsString();
- }
- if(this.regionName.equals(regionName)) {
- eventProcessed.set(true);
- }
- synchronized(eventProcessed) {
- eventProcessed.notifyAll();
- }
- }
- }
- }
-
- public static class CloseRegionEventListener implements EventHandlerListener {
- private static final Log LOG = LogFactory.getLog(CloseRegionEventListener.class);
- String regionToClose;
- AtomicBoolean closeEventProcessed;
-
- public CloseRegionEventListener(String regionToClose,
- AtomicBoolean closeEventProcessed) {
- this.regionToClose = regionToClose;
- this.closeEventProcessed = closeEventProcessed;
- }
-
- @Override
- public void afterProcess(EventHandler event) {
- LOG.info("afterProcess(" + event + ")");
- if(event.getEventType() == EventType.RS_ZK_REGION_CLOSED) {
- LOG.info("Finished processing CLOSE REGION");
- TotesHRegionInfo hriCarrier = (TotesHRegionInfo)event;
- if (regionToClose.equals(hriCarrier.getHRegionInfo().getRegionNameAsString())) {
- LOG.info("Setting closeEventProcessed flag");
- closeEventProcessed.set(true);
- } else {
- LOG.info("Region to close didn't match");
- }
- }
- }
-
- @Override
- public void beforeProcess(EventHandler event) {
- if(event.getEventType() == EventType.M_RS_CLOSE_REGION) {
- LOG.info("Received CLOSE RPC and beginning to process it");
- }
- }
- }
-
/**
* This test shows how a region won't be able to be assigned to a RS
* if it's already "processing" it.
@@ -253,13 +156,6 @@ public void testRSAlreadyProcessingRegion() throws Exception {
// fake that hr1 is processing the region
hr1.getRegionsInTransitionInRS().putIfAbsent(hri.getEncodedNameAsBytes(), true);
- AtomicBoolean reopenEventProcessed = new AtomicBoolean(false);
- EventHandlerListener openListener =
- new ReopenEventListener(hri.getRegionNameAsString(),
- reopenEventProcessed, EventType.RS_ZK_REGION_OPENED);
- cluster.getMaster().executorService.
- registerListener(EventType.RS_ZK_REGION_OPENED, openListener);
-
// now ask the master to move the region to hr1, will fail
TEST_UTIL.getHBaseAdmin().move(hri.getEncodedNameAsBytes(),
Bytes.toBytes(hr1.getServerName().toString()));
@@ -269,22 +165,14 @@ public void testRSAlreadyProcessingRegion() throws Exception {
// remove the block and reset the boolean
hr1.getRegionsInTransitionInRS().remove(hri.getEncodedNameAsBytes());
- reopenEventProcessed.set(false);
// now try moving a region when there is no region in transition.
hri = getNonMetaRegion(ProtobufUtil.getOnlineRegions(hr1));
- openListener =
- new ReopenEventListener(hri.getRegionNameAsString(),
- reopenEventProcessed, EventType.RS_ZK_REGION_OPENED);
-
- cluster.getMaster().executorService.
- registerListener(EventType.RS_ZK_REGION_OPENED, openListener);
-
TEST_UTIL.getHBaseAdmin().move(hri.getEncodedNameAsBytes(),
Bytes.toBytes(hr0.getServerName().toString()));
- while (!reopenEventProcessed.get()) {
+ while (!cluster.getMaster().assignmentManager.wasOpenedHandlerCalled(hri)) {
Threads.sleep(100);
}
@@ -304,15 +192,9 @@ public void testRSAlreadyProcessingRegion() throws Exception {
HRegionInfo hri = getNonMetaRegion(ProtobufUtil.getOnlineRegions(regionServer));
LOG.debug("Asking RS to close region " + hri.getRegionNameAsString());
- AtomicBoolean closeEventProcessed = new AtomicBoolean(false);
- EventHandlerListener listener =
- new CloseRegionEventListener(hri.getRegionNameAsString(),
- closeEventProcessed);
- cluster.getMaster().executorService.registerListener(EventType.RS_ZK_REGION_CLOSED, listener);
-
cluster.getMaster().assignmentManager.unassign(hri);
- while (!closeEventProcessed.get()) {
+ while (!cluster.getMaster().assignmentManager.wasClosedHandlerCalled(hri)) {
Threads.sleep(100);
}
LOG.info("Done with testCloseRegion");
|
3b4f5d617969bc4d6bda9f0da17a141cb32bd521
|
camel
|
CAMEL-939 - Fix csv test that fails on slower- machines sometimes.--git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@700232 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/components/camel-csv/src/test/java/org/apache/camel/dataformat/csv/CsvRouteTest.java b/components/camel-csv/src/test/java/org/apache/camel/dataformat/csv/CsvRouteTest.java
index 5c0c336a6720c..e4ea16d6cc753 100644
--- a/components/camel-csv/src/test/java/org/apache/camel/dataformat/csv/CsvRouteTest.java
+++ b/components/camel-csv/src/test/java/org/apache/camel/dataformat/csv/CsvRouteTest.java
@@ -70,7 +70,11 @@ public void testSendMessage() throws Exception {
log.debug("Received " + text);
assertNotNull("Should be able to convert received body to a string", text);
- assertEquals("text body", "abc,123", text.trim());
+
+ // order is not guaranteed with a Map (which was passed in before)
+ // so we need to check for both combinations
+ assertTrue("Text body has wrong value.", "abc,123".equals(text.trim())
+ || "123,abc".equals(text.trim()));
}
}
|
5662b8294b0cd31ce780f577fc0847dd347b49ad
|
intellij-community
|
Fix for upsource--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/projectModel-impl/src/com/intellij/openapi/roots/impl/RootIndex.java b/platform/projectModel-impl/src/com/intellij/openapi/roots/impl/RootIndex.java
index 34c9a5742527a..1cdd431d86def 100644
--- a/platform/projectModel-impl/src/com/intellij/openapi/roots/impl/RootIndex.java
+++ b/platform/projectModel-impl/src/com/intellij/openapi/roots/impl/RootIndex.java
@@ -300,7 +300,8 @@ private DirectoryInfo cacheInfos(VirtualFile dir, @Nullable VirtualFile stopAt,
}
@NotNull
- Query<VirtualFile> getDirectoriesByPackageName(@NotNull final String packageName, final boolean includeLibrarySources) {
+ public Query<VirtualFile> getDirectoriesByPackageName(@NotNull final String packageName, final boolean includeLibrarySources) {
+ // Note that this method is used in upsource as well, hence, don't reduce this method's visibility.
List<VirtualFile> result = myPackageDirectoryCache.getDirectoriesByPackageName(packageName);
if (!includeLibrarySources) {
result = ContainerUtil.filter(result, new Condition<VirtualFile>() {
|
9ac048c68ce96eec3a900c3b703401ae63170d51
|
elasticsearch
|
add "same" thread pool type (really, just for- testing)--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
index 6422040453788..0dd8fad71a72d 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/threadpool/ThreadPool.java
@@ -137,7 +137,10 @@ private Executor build(String name, String defaultType, @Nullable Settings setti
}
String type = settings.get("type", defaultType);
ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(settings, "[" + name + "]");
- if ("cached".equals(type)) {
+ if ("same".equals(type)) {
+ logger.debug("creating thread_pool [{}], type [{}]", name, type);
+ return MoreExecutors.sameThreadExecutor();
+ } else if ("cached".equals(type)) {
TimeValue keepAlive = settings.getAsTime("keep_alive", defaultSettings.getAsTime("keep_alive", timeValueMinutes(5)));
logger.debug("creating thread_pool [{}], type [{}], keep_alive [{}]", name, type, keepAlive);
return new ThreadPoolExecutor(0, Integer.MAX_VALUE,
|
ebe25c83d1f5f1202c560516e8d58294b97ddf37
|
orientdb
|
Fixed bug using SQL projection against Object- Database interface. Now returns always documents instead of POJOs when the- ODocument has no class associated.--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/ODatabasePojoAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/db/ODatabasePojoAbstract.java
index 38afe2694d8..6e3ad037ca7 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/db/ODatabasePojoAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/ODatabasePojoAbstract.java
@@ -212,7 +212,11 @@ public <RET extends List<?>> RET query(final OQuery<?> iCommand) {
Object obj;
for (ODocument doc : result) {
// GET THE ASSOCIATED DOCUMENT
- obj = getUserObjectByRecord(doc, iCommand.getFetchPlan(), true);
+ if (doc.getClassName() == null)
+ obj = doc;
+ else
+ obj = getUserObjectByRecord(doc, iCommand.getFetchPlan(), true);
+
resultPojo.add(obj);
}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDDocumentPhysicalTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDDocumentPhysicalTest.java
index 27699e60437..d073da7d843 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDDocumentPhysicalTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDDocumentPhysicalTest.java
@@ -30,6 +30,7 @@
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.serialization.OBase64Utils;
+import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
@Test(groups = { "crud", "record-vobject" }, sequential = true)
public class CRUDDocumentPhysicalTest {
@@ -234,4 +235,35 @@ public void testNestedEmbeddedMap() {
Assert.assertEquals(loadedMap3.size(), 0);
}
+ @Test
+ public void queryWithPositionalParameters() {
+ database = ODatabaseDocumentPool.global().acquire(url, "admin", "admin");
+ database.open("admin", "admin");
+
+ final OSQLSynchQuery<ODocument> query = new OSQLSynchQuery<ODocument>("select from Profile where name = ? and surname = ?");
+ List<ODocument> result = database.command(query).execute("Barack", "Obama");
+
+ Assert.assertTrue(result.size() != 0);
+
+ database.close();
+ }
+
+ @Test
+ public void queryWithNamedParameters() {
+ database = ODatabaseDocumentPool.global().acquire(url, "admin", "admin");
+ database.open("admin", "admin");
+
+ final OSQLSynchQuery<ODocument> query = new OSQLSynchQuery<ODocument>(
+ "select from Profile where name = :name and surname = :surname");
+
+ HashMap<String, String> params = new HashMap<String, String>();
+ params.put("name", "Barack");
+ params.put("surname", "Obama");
+
+ List<ODocument> result = database.command(query).execute(params);
+
+ Assert.assertTrue(result.size() != 0);
+
+ database.close();
+ }
}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDObjectPhysicalTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDObjectPhysicalTest.java
index 86d690ad36f..c898058b253 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDObjectPhysicalTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDObjectPhysicalTest.java
@@ -16,6 +16,7 @@
package com.orientechnologies.orient.test.database.auto;
import java.util.Date;
+import java.util.HashMap;
import java.util.List;
import org.testng.Assert;
@@ -276,4 +277,36 @@ public void deleteFirst() {
database.close();
}
+
+ @Test
+ public void queryWithPositionalParameters() {
+ database = ODatabaseObjectPool.global().acquire(url, "admin", "admin");
+ database.open("admin", "admin");
+
+ final OSQLSynchQuery<ODocument> query = new OSQLSynchQuery<ODocument>("select from Profile where name = ? and surname = ?");
+ List<ODocument> result = database.command(query).execute("Barack", "Obama");
+
+ Assert.assertTrue(result.size() != 0);
+
+ database.close();
+ }
+
+ @Test
+ public void queryWithNamedParameters() {
+ database = ODatabaseObjectPool.global().acquire(url, "admin", "admin");
+ database.open("admin", "admin");
+
+ final OSQLSynchQuery<ODocument> query = new OSQLSynchQuery<ODocument>(
+ "select from Profile where name = :name and surname = :surname");
+
+ HashMap<String, String> params = new HashMap<String, String>();
+ params.put("name", "Barack");
+ params.put("surname", "Obama");
+
+ List<ODocument> result = database.command(query).execute(params);
+
+ Assert.assertTrue(result.size() != 0);
+
+ database.close();
+ }
}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectProjectionsTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectProjectionsTest.java
index 9a2111e1fe9..203fb57e5b8 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectProjectionsTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectProjectionsTest.java
@@ -23,15 +23,18 @@
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
+import com.orientechnologies.orient.core.db.object.ODatabaseObjectTx;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
@Test(groups = "sql-select")
public class SQLSelectProjectionsTest {
+ private String url;
private ODatabaseDocument database;
@Parameters(value = "url")
public SQLSelectProjectionsTest(String iURL) {
+ url = iURL;
database = new ODatabaseDocumentTx(iURL);
}
@@ -52,6 +55,23 @@ public void queryProjectionOk() {
database.close();
}
+ @Test
+ public void queryProjectionObjectLevel() {
+ ODatabaseObjectTx db = new ODatabaseObjectTx(url);
+ db.open("admin", "admin");
+
+ List<ODocument> result = db.query(new OSQLSynchQuery<ODocument>(" select nick, followings, followers from Profile "));
+
+ Assert.assertTrue(result.size() != 0);
+
+ for (ODocument d : result) {
+ Assert.assertNull(d.getClassName());
+ Assert.assertEquals(d.getRecordType(), ODocument.RECORD_TYPE);
+ }
+
+ db.close();
+ }
+
@Test
public void queryProjectionLinkedAndFunction() {
database.open("admin", "admin");
@@ -141,7 +161,8 @@ public void queryProjectionAliases() {
database.open("admin", "admin");
List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select name.append('!') as 1, surname as 2 from Profile where name is not null and surname is not null")).execute();
+ new OSQLSynchQuery<ODocument>(
+ "select name.append('!') as 1, surname as 2 from Profile where name is not null and surname is not null")).execute();
Assert.assertTrue(result.size() != 0);
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java
index 719e6140bbc..d4f5e5c0261 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java
@@ -18,7 +18,6 @@
import java.text.ParseException;
import java.util.Collection;
import java.util.Date;
-import java.util.HashMap;
import java.util.List;
import org.testng.Assert;
@@ -574,34 +573,4 @@ public void queryWithPagination() {
database.close();
}
-
- @Test
- public void queryWithPositionalParameters() {
- database.open("admin", "admin");
-
- final OSQLSynchQuery<ODocument> query = new OSQLSynchQuery<ODocument>("select from Profile where name = ? and surname = ?");
- List<ODocument> result = database.command(query).execute("Barack", "Obama");
-
- Assert.assertTrue(result.size() != 0);
-
- database.close();
- }
-
- @Test
- public void queryWithNamedParameters() {
- database.open("admin", "admin");
-
- final OSQLSynchQuery<ODocument> query = new OSQLSynchQuery<ODocument >(
- "select from Profile where name = :name and surname = :surname");
-
- HashMap<String, String> params = new HashMap<String, String>();
- params.put("name", "Barack");
- params.put("surname", "Obama");
-
- List<ODocument> result = database.command(query).execute(params);
-
- Assert.assertTrue(result.size() != 0);
-
- database.close();
- }
}
|
6f259eb229470099173b20b236e55435597b1e3f
|
ReactiveX-RxJava
|
Delay: error cut ahead was not properly serialized--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/src/main/java/rx/internal/operators/OperatorDelay.java b/src/main/java/rx/internal/operators/OperatorDelay.java
index 48b8454dc8..00ab5d1b49 100644
--- a/src/main/java/rx/internal/operators/OperatorDelay.java
+++ b/src/main/java/rx/internal/operators/OperatorDelay.java
@@ -49,22 +49,36 @@ public Subscriber<? super T> call(final Subscriber<? super T> child) {
final Worker worker = scheduler.createWorker();
child.add(worker);
return new Subscriber<T>(child) {
-
+ // indicates an error cut ahead
+ // accessed from the worker thread only
+ boolean done;
@Override
public void onCompleted() {
worker.schedule(new Action0() {
@Override
public void call() {
- child.onCompleted();
+ if (!done) {
+ done = true;
+ child.onCompleted();
+ }
}
}, delay, unit);
}
@Override
- public void onError(Throwable e) {
- child.onError(e);
+ public void onError(final Throwable e) {
+ worker.schedule(new Action0() {
+ @Override
+ public void call() {
+ if (!done) {
+ done = true;
+ child.onError(e);
+ worker.unsubscribe();
+ }
+ }
+ });
}
@Override
@@ -73,7 +87,9 @@ public void onNext(final T t) {
@Override
public void call() {
- child.onNext(t);
+ if (!done) {
+ child.onNext(t);
+ }
}
}, delay, unit);
diff --git a/src/test/java/rx/internal/operators/OperatorDelayTest.java b/src/test/java/rx/internal/operators/OperatorDelayTest.java
index 9f80f0dc73..e4db021eaf 100644
--- a/src/test/java/rx/internal/operators/OperatorDelayTest.java
+++ b/src/test/java/rx/internal/operators/OperatorDelayTest.java
@@ -798,4 +798,27 @@ public Integer call(Integer t) {
ts.assertNoErrors();
assertEquals(RxRingBuffer.SIZE * 2, ts.getOnNextEvents().size());
}
+
+ @Test
+ public void testErrorRunsBeforeOnNext() {
+ TestScheduler test = Schedulers.test();
+
+ PublishSubject<Integer> ps = PublishSubject.create();
+
+ TestSubscriber<Integer> ts = TestSubscriber.create();
+
+ ps.delay(1, TimeUnit.SECONDS, test).subscribe(ts);
+
+ ps.onNext(1);
+
+ test.advanceTimeBy(500, TimeUnit.MILLISECONDS);
+
+ ps.onError(new TestException());
+
+ test.advanceTimeBy(1, TimeUnit.SECONDS);
+
+ ts.assertNoValues();
+ ts.assertError(TestException.class);
+ ts.assertNotCompleted();
+ }
}
|
e8585afa032d2aee0593b238c46799cbb884732d
|
hadoop
|
YARN-45. Add protocol for schedulers to request- containers back from ApplicationMasters. Contributed by Carlo Curino and- Chris Douglas.--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1479773 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index a83cc29faaae7..5bd6f8e65b838 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -45,6 +45,9 @@ Release 2.0.5-beta - UNRELEASED
YARN-482. FS: Extend SchedulingMode to intermediate queues.
(kkambatl via tucu)
+ YARN-45. Add protocol for schedulers to request containers back from
+ ApplicationMasters. (Carlo Curino, cdouglas)
+
IMPROVEMENTS
YARN-365. Change NM heartbeat handling to not generate a scheduler event
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/AllocateResponse.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/AllocateResponse.java
index 0426ee359a6bc..8da0d95bb2cdf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/AllocateResponse.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/AllocateResponse.java
@@ -22,6 +22,7 @@
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.classification.InterfaceStability.Stable;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.AMRMProtocol;
@@ -48,6 +49,7 @@
* </li>
* <li>A list of nodes whose status has been updated.</li>
* <li>The number of available nodes in a cluster.</li>
+ * <li>A description of resources requested back by the cluster</li>
* </ul>
* </p>
*
@@ -152,4 +154,27 @@ public interface AllocateResponse {
@Private
@Unstable
public void setNumClusterNodes(int numNodes);
+
+ /**
+ * Get the description of containers owned by the AM, but requested back by
+ * the cluster. Note that the RM may have an inconsistent view of the
+ * resources owned by the AM. These messages are advisory, and the AM may
+ * elect to ignore them.
+ *
+ * The message is a snapshot of the resources the RM wants back from the AM.
+ * While demand persists, the RM will repeat its request; applications should
+ * not interpret each message as a request for <emph>additional<emph>
+ * resources on top of previous messages. Resources requested consistently
+ * over some duration may be forcibly killed by the RM.
+ *
+ * @return A specification of the resources to reclaim from this AM.
+ */
+ @Public
+ @Evolving
+ public PreemptionMessage getPreemptionMessage();
+
+ @Private
+ @Unstable
+ public void setPreemptionMessage(PreemptionMessage request);
+
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionContainer.java
new file mode 100644
index 0000000000000..d51d696854b58
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionContainer.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.api.protocolrecords;
+
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+
+/**
+ * Specific container requested back by the <code>ResourceManager</code>.
+ * @see PreemptionContract
+ * @see StrictPreemptionContract
+ */
+public interface PreemptionContainer {
+
+ /**
+ * @return Container referenced by this handle.
+ */
+ @Public
+ @Evolving
+ public ContainerId getId();
+
+ @Private
+ @Unstable
+ public void setId(ContainerId id);
+
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionContract.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionContract.java
new file mode 100644
index 0000000000000..8fc64e5085e33
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionContract.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.api.protocolrecords;
+
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+
+/**
+ * Description of resources requested back by the <code>ResourceManager</code>.
+ * The <code>ApplicationMaster</code> (AM) can satisfy this request according
+ * to its own priorities to prevent containers from being forcibly killed by
+ * the platform.
+ * @see PreemptionMessage
+ */
+public interface PreemptionContract {
+
+ /**
+ * If the AM releases resources matching these requests, then the {@link
+ * PreemptionContainer}s enumerated in {@link #getContainers()} should not be
+ * evicted from the cluster. Due to delays in propagating cluster state and
+ * sending these messages, there are conditions where satisfied contracts may
+ * not prevent the platform from killing containers.
+ * @return List of {@link PreemptionResourceRequest} to update the
+ * <code>ApplicationMaster</code> about resources requested back by the
+ * <code>ResourceManager</code>.
+ * @see AllocateRequest#setAskList(List)
+ */
+ @Public
+ @Evolving
+ public List<PreemptionResourceRequest> getResourceRequest();
+
+ @Private
+ @Unstable
+ public void setResourceRequest(List<PreemptionResourceRequest> req);
+
+ /**
+ * Assign the set of {@link PreemptionContainer} specifying which containers
+ * owned by the <code>ApplicationMaster</code> that may be reclaimed by the
+ * <code>ResourceManager</code>. If the AM prefers a different set of
+ * containers, then it may checkpoint or kill containers matching the
+ * description in {@link #getResourceRequest}.
+ * @return Set of containers at risk if the contract is not met.
+ */
+ @Public
+ @Evolving
+ public Set<PreemptionContainer> getContainers();
+
+
+ @Private
+ @Unstable
+ public void setContainers(Set<PreemptionContainer> containers);
+
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionMessage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionMessage.java
new file mode 100644
index 0000000000000..a7961fead61bc
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionMessage.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.api.protocolrecords;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+
+/**
+ * A {@link PreemptionMessage} is part of the RM-AM protocol, and it is used by
+ * the RM to specify resources that the RM wants to reclaim from this
+ * <code>ApplicationMaster</code> (AM). The AM receives a {@link
+ * StrictPreemptionContract} message encoding which containers the platform may
+ * forcibly kill, granting it an opportunity to checkpoint state or adjust its
+ * execution plan. The message may also include a {@link PreemptionContract}
+ * granting the AM more latitude in selecting which resources to return to the
+ * cluster.
+ *
+ * The AM should decode both parts of the message. The {@link
+ * StrictPreemptionContract} specifies particular allocations that the RM
+ * requires back. The AM can checkpoint containers' state, adjust its execution
+ * plan to move the computation, or take no action and hope that conditions that
+ * caused the RM to ask for the container will change.
+ *
+ * In contrast, the {@link PreemptionContract} also includes a description of
+ * resources with a set of containers. If the AM releases containers matching
+ * that profile, then the containers enumerated in {@link
+ * PreemptionContract#getContainers()} may not be killed.
+ *
+ * Each preemption message reflects the RM's current understanding of the
+ * cluster state, so a request to return <emph>N</emph> containers may not
+ * reflect containers the AM is releasing, recently exited containers the RM has
+ * yet to learn about, or new containers allocated before the message was
+ * generated. Conversely, an RM may request a different profile of containers in
+ * subsequent requests.
+ *
+ * The policy enforced by the RM is part of the scheduler. Generally, only
+ * containers that have been requested consistently should be killed, but the
+ * details are not specified.
+ */
+@Public
+@Evolving
+public interface PreemptionMessage {
+
+ /**
+ * @return Specific resources that may be killed by the
+ * <code>ResourceManager</code>
+ */
+ @Public
+ @Evolving
+ public StrictPreemptionContract getStrictContract();
+
+ @Private
+ @Unstable
+ public void setStrictContract(StrictPreemptionContract set);
+
+ /**
+ * @return Contract describing resources to return to the cluster.
+ */
+ @Public
+ @Evolving
+ public PreemptionContract getContract();
+
+ @Private
+ @Unstable
+ public void setContract(PreemptionContract contract);
+
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionResourceRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionResourceRequest.java
new file mode 100644
index 0000000000000..1187fd8d25f4b
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/PreemptionResourceRequest.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.api.protocolrecords;
+
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.api.records.ResourceRequest;
+
+/**
+ * Description of resources requested back by the cluster.
+ * @see PreemptionContract
+ * @see AllocateRequest#setAskList(java.util.List)
+ */
+public interface PreemptionResourceRequest {
+
+ /**
+ * @return Resource described in this request, to be matched against running
+ * containers.
+ */
+ @Public
+ @Evolving
+ public ResourceRequest getResourceRequest();
+
+ @Private
+ @Unstable
+ public void setResourceRequest(ResourceRequest req);
+
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/StrictPreemptionContract.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/StrictPreemptionContract.java
new file mode 100644
index 0000000000000..11d7bb9f68b99
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/StrictPreemptionContract.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.api.protocolrecords;
+
+import java.util.Set;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+
+/**
+ * Enumeration of particular allocations to be reclaimed. The platform will
+ * reclaim exactly these resources, so the <code>ApplicationMaster</code> (AM)
+ * may attempt to checkpoint work or adjust its execution plan to accommodate
+ * it. In contrast to {@link PreemptionContract}, the AM has no flexibility in
+ * selecting which resources to return to the cluster.
+ * @see PreemptionMessage
+ */
+@Public
+@Evolving
+public interface StrictPreemptionContract {
+
+ /**
+ * Get the set of {@link PreemptionContainer} specifying containers owned by
+ * the <code>ApplicationMaster</code> that may be reclaimed by the
+ * <code>ResourceManager</code>.
+ * @return the set of {@link ContainerId} to be preempted.
+ */
+ @Public
+ @Evolving
+ public Set<PreemptionContainer> getContainers();
+
+ @Private
+ @Unstable
+ public void setContainers(Set<PreemptionContainer> containers);
+
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl.java
index 4643e4ed02e19..dac8c73580d07 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateResponsePBImpl.java
@@ -24,6 +24,7 @@
import java.util.List;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.PreemptionMessage;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeReport;
@@ -39,7 +40,7 @@
import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProtoOrBuilder;
-
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionMessageProto;
public class AllocateResponsePBImpl extends ProtoBase<AllocateResponseProto>
@@ -54,6 +55,7 @@ public class AllocateResponsePBImpl extends ProtoBase<AllocateResponseProto>
private List<ContainerStatus> completedContainersStatuses = null;
private List<NodeReport> updatedNodes = null;
+ private PreemptionMessage preempt;
public AllocateResponsePBImpl() {
@@ -94,6 +96,9 @@ private synchronized void mergeLocalToBuilder() {
if (this.limit != null) {
builder.setLimit(convertToProtoFormat(this.limit));
}
+ if (this.preempt != null) {
+ builder.setPreempt(convertToProtoFormat(this.preempt));
+ }
}
private synchronized void mergeLocalToProto() {
@@ -217,6 +222,28 @@ public synchronized void setNumClusterNodes(int numNodes) {
builder.setNumClusterNodes(numNodes);
}
+ @Override
+ public synchronized PreemptionMessage getPreemptionMessage() {
+ AllocateResponseProtoOrBuilder p = viaProto ? proto : builder;
+ if (this.preempt != null) {
+ return this.preempt;
+ }
+ if (!p.hasPreempt()) {
+ return null;
+ }
+ this.preempt = convertFromProtoFormat(p.getPreempt());
+ return this.preempt;
+ }
+
+ @Override
+ public synchronized void setPreemptionMessage(PreemptionMessage preempt) {
+ maybeInitBuilder();
+ if (null == preempt) {
+ builder.clearPreempt();
+ }
+ this.preempt = preempt;
+ }
+
// Once this is called. updatedNodes will never be null - until a getProto is
// called.
private synchronized void initLocalNewNodeReportList() {
@@ -393,4 +420,11 @@ private synchronized ResourceProto convertToProtoFormat(Resource r) {
return ((ResourcePBImpl) r).getProto();
}
+ private synchronized PreemptionMessagePBImpl convertFromProtoFormat(PreemptionMessageProto p) {
+ return new PreemptionMessagePBImpl(p);
+ }
+
+ private synchronized PreemptionMessageProto convertToProtoFormat(PreemptionMessage r) {
+ return ((PreemptionMessagePBImpl)r).getProto();
+ }
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionContainerPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionContainerPBImpl.java
new file mode 100644
index 0000000000000..624d1270f4b7a
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionContainerPBImpl.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
+
+import org.apache.hadoop.yarn.api.protocolrecords.PreemptionContainer;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
+import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionContainerProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionContainerProtoOrBuilder;
+
+public class PreemptionContainerPBImpl implements PreemptionContainer {
+
+ PreemptionContainerProto proto =
+ PreemptionContainerProto.getDefaultInstance();
+ PreemptionContainerProto.Builder builder = null;
+
+ boolean viaProto = false;
+ private ContainerId id;
+
+ public PreemptionContainerPBImpl() {
+ builder = PreemptionContainerProto.newBuilder();
+ }
+
+ public PreemptionContainerPBImpl(PreemptionContainerProto proto) {
+ this.proto = proto;
+ viaProto = true;
+ }
+
+ public synchronized PreemptionContainerProto getProto() {
+ mergeLocalToProto();
+ proto = viaProto ? proto : builder.build();
+ viaProto = true;
+ return proto;
+ }
+
+ private void mergeLocalToProto() {
+ if (viaProto)
+ maybeInitBuilder();
+ mergeLocalToBuilder();
+ proto = builder.build();
+ viaProto = true;
+ }
+
+ private void mergeLocalToBuilder() {
+ if (id != null) {
+ builder.setId(convertToProtoFormat(id));
+ }
+ }
+
+ private void maybeInitBuilder() {
+ if (viaProto || builder == null) {
+ builder = PreemptionContainerProto.newBuilder(proto);
+ }
+ viaProto = false;
+ }
+
+ @Override
+ public synchronized ContainerId getId() {
+ PreemptionContainerProtoOrBuilder p = viaProto ? proto : builder;
+ if (id != null) {
+ return id;
+ }
+ if (!p.hasId()) {
+ return null;
+ }
+ id = convertFromProtoFormat(p.getId());
+ return id;
+ }
+
+ @Override
+ public synchronized void setId(final ContainerId id) {
+ maybeInitBuilder();
+ if (null == id) {
+ builder.clearId();
+ }
+ this.id = id;
+ }
+
+ private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) {
+ return new ContainerIdPBImpl(p);
+ }
+
+ private ContainerIdProto convertToProtoFormat(ContainerId t) {
+ return ((ContainerIdPBImpl)t).getProto();
+ }
+
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionContractPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionContractPBImpl.java
new file mode 100644
index 0000000000000..61534365ca0ec
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionContractPBImpl.java
@@ -0,0 +1,228 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.yarn.api.protocolrecords.PreemptionContainer;
+import org.apache.hadoop.yarn.api.protocolrecords.PreemptionContract;
+import org.apache.hadoop.yarn.api.protocolrecords.PreemptionResourceRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.PreemptionResourceRequestPBImpl;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionContainerProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionContractProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionContractProtoOrBuilder;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionResourceRequestProto;
+
+public class PreemptionContractPBImpl implements PreemptionContract {
+
+ PreemptionContractProto proto = PreemptionContractProto.getDefaultInstance();
+ PreemptionContractProto.Builder builder = null;
+
+ boolean viaProto = false;
+ private Set<PreemptionContainer> containers;
+ private List<PreemptionResourceRequest> resources;
+
+ public PreemptionContractPBImpl() {
+ builder = PreemptionContractProto.newBuilder();
+ }
+
+ public PreemptionContractPBImpl(PreemptionContractProto proto) {
+ this.proto = proto;
+ viaProto = true;
+ }
+
+ public synchronized PreemptionContractProto getProto() {
+ mergeLocalToProto();
+ proto = viaProto ? proto : builder.build();
+ viaProto = true;
+ return proto;
+ }
+
+ private void mergeLocalToProto() {
+ if (viaProto)
+ maybeInitBuilder();
+ mergeLocalToBuilder();
+ proto = builder.build();
+ viaProto = true;
+ }
+
+ private void mergeLocalToBuilder() {
+ if (this.resources != null) {
+ addResourcesToProto();
+ }
+ if (this.containers != null) {
+ addContainersToProto();
+ }
+ }
+
+ private void maybeInitBuilder() {
+ if (viaProto || builder == null) {
+ builder = PreemptionContractProto.newBuilder(proto);
+ }
+ viaProto = false;
+ }
+
+ @Override
+ public synchronized Set<PreemptionContainer> getContainers() {
+ initPreemptionContainers();
+ return containers;
+ }
+
+ @Override
+ public synchronized void setContainers(
+ final Set<PreemptionContainer> containers) {
+ if (null == containers) {
+ builder.clearContainer();
+ }
+ this.containers = containers;
+ }
+
+ @Override
+ public synchronized List<PreemptionResourceRequest> getResourceRequest() {
+ initPreemptionResourceRequests();
+ return resources;
+ }
+
+ @Override
+ public synchronized void setResourceRequest(
+ final List<PreemptionResourceRequest> req) {
+ if (null == resources) {
+ builder.clearResource();
+ }
+ this.resources = req;
+ }
+
+ private void initPreemptionResourceRequests() {
+ if (resources != null) {
+ return;
+ }
+ PreemptionContractProtoOrBuilder p = viaProto ? proto : builder;
+ List<PreemptionResourceRequestProto> list = p.getResourceList();
+ resources = new ArrayList<PreemptionResourceRequest>();
+
+ for (PreemptionResourceRequestProto rr : list) {
+ resources.add(convertFromProtoFormat(rr));
+ }
+ }
+
+ private void addResourcesToProto() {
+ maybeInitBuilder();
+ builder.clearResource();
+ if (null == resources) {
+ return;
+ }
+ Iterable<PreemptionResourceRequestProto> iterable =
+ new Iterable<PreemptionResourceRequestProto>() {
+ @Override
+ public Iterator<PreemptionResourceRequestProto> iterator() {
+ return new Iterator<PreemptionResourceRequestProto>() {
+
+ Iterator<PreemptionResourceRequest> iter = resources.iterator();
+
+ @Override
+ public boolean hasNext() {
+ return iter.hasNext();
+ }
+
+ @Override
+ public PreemptionResourceRequestProto next() {
+ return convertToProtoFormat(iter.next());
+ }
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+
+ }
+ };
+
+ }
+ };
+ builder.addAllResource(iterable);
+ }
+
+ private void initPreemptionContainers() {
+ if (containers != null) {
+ return;
+ }
+ PreemptionContractProtoOrBuilder p = viaProto ? proto : builder;
+ List<PreemptionContainerProto> list = p.getContainerList();
+ containers = new HashSet<PreemptionContainer>();
+
+ for (PreemptionContainerProto c : list) {
+ containers.add(convertFromProtoFormat(c));
+ }
+ }
+
+ private void addContainersToProto() {
+ maybeInitBuilder();
+ builder.clearContainer();
+ if (null == containers) {
+ return;
+ }
+ Iterable<PreemptionContainerProto> iterable =
+ new Iterable<PreemptionContainerProto>() {
+ @Override
+ public Iterator<PreemptionContainerProto> iterator() {
+ return new Iterator<PreemptionContainerProto>() {
+
+ Iterator<PreemptionContainer> iter = containers.iterator();
+
+ @Override
+ public boolean hasNext() {
+ return iter.hasNext();
+ }
+
+ @Override
+ public PreemptionContainerProto next() {
+ return convertToProtoFormat(iter.next());
+ }
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+
+ }
+ };
+
+ }
+ };
+ builder.addAllContainer(iterable);
+ }
+
+ private PreemptionContainerPBImpl convertFromProtoFormat(PreemptionContainerProto p) {
+ return new PreemptionContainerPBImpl(p);
+ }
+
+ private PreemptionContainerProto convertToProtoFormat(PreemptionContainer t) {
+ return ((PreemptionContainerPBImpl)t).getProto();
+ }
+
+ private PreemptionResourceRequestPBImpl convertFromProtoFormat(PreemptionResourceRequestProto p) {
+ return new PreemptionResourceRequestPBImpl(p);
+ }
+
+ private PreemptionResourceRequestProto convertToProtoFormat(PreemptionResourceRequest t) {
+ return ((PreemptionResourceRequestPBImpl)t).getProto();
+ }
+
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionMessagePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionMessagePBImpl.java
new file mode 100644
index 0000000000000..72a7eb151ffb3
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionMessagePBImpl.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
+
+import org.apache.hadoop.yarn.api.protocolrecords.PreemptionContract;
+import org.apache.hadoop.yarn.api.protocolrecords.PreemptionMessage;
+import org.apache.hadoop.yarn.api.protocolrecords.StrictPreemptionContract;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionContractProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionMessageProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionMessageProtoOrBuilder;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.StrictPreemptionContractProto;
+
+public class PreemptionMessagePBImpl implements PreemptionMessage {
+
+ PreemptionMessageProto proto = PreemptionMessageProto.getDefaultInstance();
+ PreemptionMessageProto.Builder builder = null;
+
+ boolean viaProto = false;
+ private StrictPreemptionContract strict;
+ private PreemptionContract contract;
+
+ public PreemptionMessagePBImpl() {
+ builder = PreemptionMessageProto.newBuilder();
+ }
+
+ public PreemptionMessagePBImpl(PreemptionMessageProto proto) {
+ this.proto = proto;
+ viaProto = true;
+ }
+
+ public synchronized PreemptionMessageProto getProto() {
+ mergeLocalToProto();
+ proto = viaProto ? proto : builder.build();
+ viaProto = true;
+ return proto;
+ }
+
+ private void mergeLocalToProto() {
+ if (viaProto)
+ maybeInitBuilder();
+ mergeLocalToBuilder();
+ proto = builder.build();
+ viaProto = true;
+ }
+
+ private void mergeLocalToBuilder() {
+ if (strict != null) {
+ builder.setStrictContract(convertToProtoFormat(strict));
+ }
+ if (contract != null) {
+ builder.setContract(convertToProtoFormat(contract));
+ }
+ }
+
+ private void maybeInitBuilder() {
+ if (viaProto || builder == null) {
+ builder = PreemptionMessageProto.newBuilder(proto);
+ }
+ viaProto = false;
+ }
+
+ @Override
+ public synchronized StrictPreemptionContract getStrictContract() {
+ PreemptionMessageProtoOrBuilder p = viaProto ? proto : builder;
+ if (strict != null) {
+ return strict;
+ }
+ if (!p.hasStrictContract()) {
+ return null;
+ }
+ strict = convertFromProtoFormat(p.getStrictContract());
+ return strict;
+ }
+
+ @Override
+ public synchronized void setStrictContract(StrictPreemptionContract strict) {
+ maybeInitBuilder();
+ if (null == strict) {
+ builder.clearStrictContract();
+ }
+ this.strict = strict;
+ }
+
+ @Override
+ public synchronized PreemptionContract getContract() {
+ PreemptionMessageProtoOrBuilder p = viaProto ? proto : builder;
+ if (contract != null) {
+ return contract;
+ }
+ if (!p.hasContract()) {
+ return null;
+ }
+ contract = convertFromProtoFormat(p.getContract());
+ return contract;
+ }
+
+ @Override
+ public synchronized void setContract(final PreemptionContract c) {
+ maybeInitBuilder();
+ if (null == c) {
+ builder.clearContract();
+ }
+ this.contract = c;
+ }
+
+ private StrictPreemptionContractPBImpl convertFromProtoFormat(
+ StrictPreemptionContractProto p) {
+ return new StrictPreemptionContractPBImpl(p);
+ }
+
+ private StrictPreemptionContractProto convertToProtoFormat(
+ StrictPreemptionContract t) {
+ return ((StrictPreemptionContractPBImpl)t).getProto();
+ }
+
+ private PreemptionContractPBImpl convertFromProtoFormat(
+ PreemptionContractProto p) {
+ return new PreemptionContractPBImpl(p);
+ }
+
+ private PreemptionContractProto convertToProtoFormat(
+ PreemptionContract t) {
+ return ((PreemptionContractPBImpl)t).getProto();
+ }
+
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionResourceRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionResourceRequestPBImpl.java
new file mode 100644
index 0000000000000..8b6ca2d4f6089
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/PreemptionResourceRequestPBImpl.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
+
+import org.apache.hadoop.yarn.api.protocolrecords.PreemptionResourceRequest;
+import org.apache.hadoop.yarn.api.records.ResourceRequest;
+import org.apache.hadoop.yarn.api.records.impl.pb.ResourceRequestPBImpl;
+import org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionResourceRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionResourceRequestProtoOrBuilder;
+
+public class PreemptionResourceRequestPBImpl implements PreemptionResourceRequest {
+
+ PreemptionResourceRequestProto proto =
+ PreemptionResourceRequestProto.getDefaultInstance();
+ PreemptionResourceRequestProto.Builder builder = null;
+
+ boolean viaProto = false;
+ private ResourceRequest rr;
+
+ public PreemptionResourceRequestPBImpl() {
+ builder = PreemptionResourceRequestProto.newBuilder();
+ }
+
+ public PreemptionResourceRequestPBImpl(PreemptionResourceRequestProto proto) {
+ this.proto = proto;
+ viaProto = true;
+ }
+
+ public synchronized PreemptionResourceRequestProto getProto() {
+ mergeLocalToProto();
+ proto = viaProto ? proto : builder.build();
+ viaProto = true;
+ return proto;
+ }
+
+ private void mergeLocalToProto() {
+ if (viaProto)
+ maybeInitBuilder();
+ mergeLocalToBuilder();
+ proto = builder.build();
+ viaProto = true;
+ }
+
+ private void mergeLocalToBuilder() {
+ if (rr != null) {
+ builder.setResource(convertToProtoFormat(rr));
+ }
+ }
+
+ private void maybeInitBuilder() {
+ if (viaProto || builder == null) {
+ builder = PreemptionResourceRequestProto.newBuilder(proto);
+ }
+ viaProto = false;
+ }
+
+ @Override
+ public synchronized ResourceRequest getResourceRequest() {
+ PreemptionResourceRequestProtoOrBuilder p = viaProto ? proto : builder;
+ if (rr != null) {
+ return rr;
+ }
+ if (!p.hasResource()) {
+ return null;
+ }
+ rr = convertFromProtoFormat(p.getResource());
+ return rr;
+ }
+
+ @Override
+ public synchronized void setResourceRequest(final ResourceRequest rr) {
+ maybeInitBuilder();
+ if (null == rr) {
+ builder.clearResource();
+ }
+ this.rr = rr;
+ }
+
+ private ResourceRequestPBImpl convertFromProtoFormat(ResourceRequestProto p) {
+ return new ResourceRequestPBImpl(p);
+ }
+
+ private ResourceRequestProto convertToProtoFormat(ResourceRequest t) {
+ return ((ResourceRequestPBImpl)t).getProto();
+ }
+
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StrictPreemptionContractPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StrictPreemptionContractPBImpl.java
new file mode 100644
index 0000000000000..7759ba22c2ec4
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StrictPreemptionContractPBImpl.java
@@ -0,0 +1,148 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
+
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.yarn.api.protocolrecords.PreemptionContainer;
+import org.apache.hadoop.yarn.api.protocolrecords.StrictPreemptionContract;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionContainerProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.StrictPreemptionContractProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.StrictPreemptionContractProtoOrBuilder;
+
+public class StrictPreemptionContractPBImpl implements StrictPreemptionContract {
+
+ StrictPreemptionContractProto proto =
+ StrictPreemptionContractProto.getDefaultInstance();
+ StrictPreemptionContractProto.Builder builder = null;
+
+ boolean viaProto = false;
+ private Set<PreemptionContainer> containers;
+
+ public StrictPreemptionContractPBImpl() {
+ builder = StrictPreemptionContractProto.newBuilder();
+ }
+
+ public StrictPreemptionContractPBImpl(StrictPreemptionContractProto proto) {
+ this.proto = proto;
+ viaProto = true;
+ }
+
+ public synchronized StrictPreemptionContractProto getProto() {
+ mergeLocalToProto();
+ proto = viaProto ? proto : builder.build();
+ viaProto = true;
+ return proto;
+ }
+
+ private void mergeLocalToProto() {
+ if (viaProto)
+ maybeInitBuilder();
+ mergeLocalToBuilder();
+ proto = builder.build();
+ viaProto = true;
+ }
+
+ private void mergeLocalToBuilder() {
+ if (this.containers != null) {
+ addContainersToProto();
+ }
+ }
+
+ private void maybeInitBuilder() {
+ if (viaProto || builder == null) {
+ builder = StrictPreemptionContractProto.newBuilder(proto);
+ }
+ viaProto = false;
+ }
+
+ @Override
+ public synchronized Set<PreemptionContainer> getContainers() {
+ initIds();
+ return containers;
+ }
+
+ @Override
+ public synchronized void setContainers(
+ final Set<PreemptionContainer> containers) {
+ if (null == containers) {
+ builder.clearContainer();
+ }
+ this.containers = containers;
+ }
+
+ private void initIds() {
+ if (containers != null) {
+ return;
+ }
+ StrictPreemptionContractProtoOrBuilder p = viaProto ? proto : builder;
+ List<PreemptionContainerProto> list = p.getContainerList();
+ containers = new HashSet<PreemptionContainer>();
+
+ for (PreemptionContainerProto c : list) {
+ containers.add(convertFromProtoFormat(c));
+ }
+ }
+
+ private void addContainersToProto() {
+ maybeInitBuilder();
+ builder.clearContainer();
+ if (containers == null) {
+ return;
+ }
+ Iterable<PreemptionContainerProto> iterable = new Iterable<PreemptionContainerProto>() {
+ @Override
+ public Iterator<PreemptionContainerProto> iterator() {
+ return new Iterator<PreemptionContainerProto>() {
+
+ Iterator<PreemptionContainer> iter = containers.iterator();
+
+ @Override
+ public boolean hasNext() {
+ return iter.hasNext();
+ }
+
+ @Override
+ public PreemptionContainerProto next() {
+ return convertToProtoFormat(iter.next());
+ }
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+
+ }
+ };
+
+ }
+ };
+ builder.addAllContainer(iterable);
+ }
+
+ private PreemptionContainerPBImpl convertFromProtoFormat(PreemptionContainerProto p) {
+ return new PreemptionContainerPBImpl(p);
+ }
+
+ private PreemptionContainerProto convertToProtoFormat(PreemptionContainer t) {
+ return ((PreemptionContainerPBImpl)t).getProto();
+ }
+
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto
index ad3b5f180720f..6ac02741bac15 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto
@@ -66,9 +66,30 @@ message AllocateResponseProto {
optional ResourceProto limit = 5;
repeated NodeReportProto updated_nodes = 6;
optional int32 num_cluster_nodes = 7;
+ optional PreemptionMessageProto preempt = 8;
}
+message PreemptionMessageProto {
+ optional StrictPreemptionContractProto strictContract = 1;
+ optional PreemptionContractProto contract = 2;
+}
+
+message StrictPreemptionContractProto {
+ repeated PreemptionContainerProto container = 1;
+}
+
+message PreemptionContractProto {
+ repeated PreemptionResourceRequestProto resource = 1;
+ repeated PreemptionContainerProto container = 2;
+}
+
+message PreemptionContainerProto {
+ optional ContainerIdProto id = 1;
+}
+message PreemptionResourceRequestProto {
+ optional ResourceRequestProto resource = 1;
+}
//////////////////////////////////////////////////////
/////// client_RM_Protocol ///////////////////////////
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java
index d95ce64f6303b..ff2c0a441a9bf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java
@@ -113,7 +113,7 @@ public AllocateResponse answer(InvocationOnMock invocation)
private AllocateResponse createAllocateResponse(
List<ContainerStatus> completed, List<Container> allocated) {
AllocateResponse response = BuilderUtils.newAllocateResponse(0, completed, allocated,
- new ArrayList<NodeReport>(), null, false, 1);
+ new ArrayList<NodeReport>(), null, false, 1, null);
return response;
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java
index f09046e37126f..e6699f3927896 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java
@@ -29,6 +29,7 @@
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.PreemptionMessage;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -404,7 +405,8 @@ public static AllocateRequest newAllocateRequest(
public static AllocateResponse newAllocateResponse(int responseId,
List<ContainerStatus> completedContainers,
List<Container> allocatedContainers, List<NodeReport> updatedNodes,
- Resource availResources, boolean reboot, int numClusterNodes) {
+ Resource availResources, boolean reboot, int numClusterNodes,
+ PreemptionMessage preempt) {
AllocateResponse response = recordFactory
.newRecordInstance(AllocateResponse.class);
response.setNumClusterNodes(numClusterNodes);
@@ -414,6 +416,7 @@ public static AllocateResponse newAllocateResponse(int responseId,
response.setUpdatedNodes(updatedNodes);
response.setAvailableResources(availResources);
response.setReboot(reboot);
+ response.setPreemptionMessage(preempt);
return response;
}
|
d4d1ccc992dc500518eaf0f50f425a6d2dde2c7c
|
hadoop
|
YARN-1424. RMAppAttemptImpl should return the- DummyApplicationResourceUsageReport for all invalid accesses. (Ray Chiang via- kasha)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1601745 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 3f58ef5b34898..30ed0939a23fa 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -136,6 +136,10 @@ Release 2.5.0 - UNRELEASED
YARN-2030. Augmented RMStateStore with state machine.(Binglin Chang via jianhe)
+ YARN-1424. RMAppAttemptImpl should return the
+ DummyApplicationResourceUsageReport for all invalid accesses.
+ (Ray Chiang via kasha)
+
OPTIMIZATIONS
BUG FIXES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationResourceUsageReport.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationResourceUsageReport.java
index 8de9ff3baf143..6e9c76fb01207 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationResourceUsageReport.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationResourceUsageReport.java
@@ -47,7 +47,7 @@ public static ApplicationResourceUsageReport newInstance(
}
/**
- * Get the number of used containers
+ * Get the number of used containers. -1 for invalid/inaccessible reports.
* @return the number of used containers
*/
@Public
@@ -63,7 +63,7 @@ public static ApplicationResourceUsageReport newInstance(
public abstract void setNumUsedContainers(int num_containers);
/**
- * Get the number of reserved containers
+ * Get the number of reserved containers. -1 for invalid/inaccessible reports.
* @return the number of reserved containers
*/
@Private
@@ -79,7 +79,7 @@ public static ApplicationResourceUsageReport newInstance(
public abstract void setNumReservedContainers(int num_reserved_containers);
/**
- * Get the used <code>Resource</code>
+ * Get the used <code>Resource</code>. -1 for invalid/inaccessible reports.
* @return the used <code>Resource</code>
*/
@Public
@@ -91,7 +91,7 @@ public static ApplicationResourceUsageReport newInstance(
public abstract void setUsedResources(Resource resources);
/**
- * Get the reserved <code>Resource</code>
+ * Get the reserved <code>Resource</code>. -1 for invalid/inaccessible reports.
* @return the reserved <code>Resource</code>
*/
@Public
@@ -103,7 +103,7 @@ public static ApplicationResourceUsageReport newInstance(
public abstract void setReservedResources(Resource reserved_resources);
/**
- * Get the needed <code>Resource</code>
+ * Get the needed <code>Resource</code>. -1 for invalid/inaccessible reports.
* @return the needed <code>Resource</code>
*/
@Public
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMServerUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMServerUtils.java
index e884d29e3039d..d93c45d0d79e9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMServerUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMServerUtils.java
@@ -28,6 +28,7 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.Resource;
@@ -43,6 +44,8 @@
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils;
+import org.apache.hadoop.yarn.server.utils.BuilderUtils;
+import org.apache.hadoop.yarn.util.resource.Resources;
/**
* Utility methods to aid serving RM data through the REST and RPC APIs
@@ -225,4 +228,13 @@ public static YarnApplicationAttemptState createApplicationAttemptState(
}
}
+ /**
+ * Statically defined dummy ApplicationResourceUsageREport. Used as
+ * a return value when a valid report cannot be found.
+ */
+ public static final ApplicationResourceUsageReport
+ DUMMY_APPLICATION_RESOURCE_USAGE_REPORT =
+ BuilderUtils.newApplicationResourceUsageReport(-1, -1,
+ Resources.createResource(-1, -1), Resources.createResource(-1, -1),
+ Resources.createResource(-1, -1));
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
index bbd135b9901cd..3318f1582a0e1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
@@ -71,6 +71,7 @@
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent;
+import org.apache.hadoop.yarn.server.resourcemanager.RMServerUtils;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.state.InvalidStateTransitonException;
import org.apache.hadoop.yarn.state.MultipleArcTransition;
@@ -293,11 +294,6 @@ RMAppEventType.KILL, new KillAttemptTransition())
private final StateMachine<RMAppState, RMAppEventType, RMAppEvent>
stateMachine;
- private static final ApplicationResourceUsageReport
- DUMMY_APPLICATION_RESOURCE_USAGE_REPORT =
- BuilderUtils.newApplicationResourceUsageReport(-1, -1,
- Resources.createResource(-1, -1), Resources.createResource(-1, -1),
- Resources.createResource(-1, -1));
private static final int DUMMY_APPLICATION_ATTEMPT_NUMBER = -1;
public RMAppImpl(ApplicationId applicationId, RMContext rmContext,
@@ -498,7 +494,7 @@ public ApplicationReport createAndGetApplicationReport(String clientUserName,
String origTrackingUrl = UNAVAILABLE;
int rpcPort = -1;
ApplicationResourceUsageReport appUsageReport =
- DUMMY_APPLICATION_RESOURCE_USAGE_REPORT;
+ RMServerUtils.DUMMY_APPLICATION_RESOURCE_USAGE_REPORT;
FinalApplicationStatus finishState = getFinalApplicationStatus();
String diags = UNAVAILABLE;
float progress = 0.0f;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
index 5b1a17d930b5b..2a1170d41df29 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
@@ -675,9 +675,7 @@ public ApplicationResourceUsageReport getApplicationResourceUsageReport() {
ApplicationResourceUsageReport report =
scheduler.getAppResourceUsageReport(this.getAppAttemptId());
if (report == null) {
- Resource none = Resource.newInstance(0, 0);
- report = ApplicationResourceUsageReport.newInstance(0, 0, none, none,
- none);
+ report = RMServerUtils.DUMMY_APPLICATION_RESOURCE_USAGE_REPORT;
}
return report;
} finally {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java
index 045b5b4ae9478..4b1f59c303903 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java
@@ -77,6 +77,7 @@
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -258,6 +259,28 @@ public void testGetApplicationAttemptReport() throws YarnException,
}
}
+ @Test
+ public void testGetApplicationResourceUsageReportDummy() throws YarnException,
+ IOException {
+ ApplicationAttemptId attemptId = getApplicationAttemptId(1);
+ YarnScheduler yarnScheduler = mockYarnScheduler();
+ RMContext rmContext = mock(RMContext.class);
+ mockRMContext(yarnScheduler, rmContext);
+ when(rmContext.getDispatcher().getEventHandler()).thenReturn(
+ new EventHandler<Event>() {
+ public void handle(Event event) {
+ }
+ });
+ ApplicationSubmissionContext asContext =
+ mock(ApplicationSubmissionContext.class);
+ YarnConfiguration config = new YarnConfiguration();
+ RMAppAttemptImpl rmAppAttemptImpl = new RMAppAttemptImpl(attemptId,
+ rmContext, yarnScheduler, null, asContext, config, false);
+ ApplicationResourceUsageReport report = rmAppAttemptImpl
+ .getApplicationResourceUsageReport();
+ assertEquals(report, RMServerUtils.DUMMY_APPLICATION_RESOURCE_USAGE_REPORT);
+ }
+
@Test
public void testGetApplicationAttempts() throws YarnException, IOException {
ClientRMService rmService = createRMService();
@@ -964,6 +987,8 @@ private static YarnScheduler mockYarnScheduler() {
Arrays.asList(getApplicationAttemptId(101), getApplicationAttemptId(102)));
when(yarnScheduler.getAppsInQueue(QUEUE_2)).thenReturn(
Arrays.asList(getApplicationAttemptId(103)));
+ ApplicationAttemptId attemptId = getApplicationAttemptId(1);
+ when(yarnScheduler.getAppResourceUsageReport(attemptId)).thenReturn(null);
return yarnScheduler;
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java
index e89b71b41056f..0fd3c3c5c99aa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java
@@ -51,6 +51,7 @@
import org.apache.hadoop.yarn.server.resourcemanager.RMAppManagerEventType;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl;
+import org.apache.hadoop.yarn.server.resourcemanager.RMServerUtils;
import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.ApplicationState;
@@ -921,6 +922,7 @@ public void testGetAppReport() {
assertAppState(RMAppState.NEW, app);
ApplicationReport report = app.createAndGetApplicationReport(null, true);
Assert.assertNotNull(report.getApplicationResourceUsageReport());
+ Assert.assertEquals(report.getApplicationResourceUsageReport(),RMServerUtils.DUMMY_APPLICATION_RESOURCE_USAGE_REPORT);
report = app.createAndGetApplicationReport("clientuser", true);
Assert.assertNotNull(report.getApplicationResourceUsageReport());
}
|
c4dd556b92f4b712e3a151181fc15af067322c3d
|
kotlin
|
Fix for EA-39487--
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/src/org/jetbrains/jet/plugin/quickfix/ExclExclCallFix.java b/idea/src/org/jetbrains/jet/plugin/quickfix/ExclExclCallFix.java
index a267e2c58223c..d6507d823ee4b 100644
--- a/idea/src/org/jetbrains/jet/plugin/quickfix/ExclExclCallFix.java
+++ b/idea/src/org/jetbrains/jet/plugin/quickfix/ExclExclCallFix.java
@@ -26,6 +26,7 @@
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
import org.jetbrains.jet.lang.psi.JetExpression;
import org.jetbrains.jet.lang.psi.JetFile;
import org.jetbrains.jet.lang.psi.JetPostfixExpression;
@@ -112,14 +113,14 @@ private static boolean isAvailableForRemove(Editor editor, PsiFile file) {
private static PsiElement getExclExclElement(Editor editor, PsiFile file) {
final PsiElement elementAtCaret = file.findElementAt(editor.getCaretModel().getOffset());
- if (elementAtCaret instanceof LeafPsiElement) {
- LeafPsiElement leafElement = (LeafPsiElement) elementAtCaret;
- if (leafElement.getElementType() == JetTokens.EXCLEXCL) {
- return elementAtCaret;
- }
+ if (isExclExclLeaf(elementAtCaret)) {
+ return elementAtCaret;
+ }
- LeafPsiElement prevLeaf = (LeafPsiElement) PsiTreeUtil.prevLeaf(leafElement);
- if (prevLeaf != null && prevLeaf.getElementType() == JetTokens.EXCLEXCL) {
+ if (elementAtCaret != null) {
+ // Case when caret is placed right after !!
+ PsiElement prevLeaf = PsiTreeUtil.prevLeaf(elementAtCaret);
+ if (isExclExclLeaf(prevLeaf)) {
return prevLeaf;
}
}
@@ -127,6 +128,10 @@ private static PsiElement getExclExclElement(Editor editor, PsiFile file) {
return null;
}
+ private static boolean isExclExclLeaf(@Nullable PsiElement element) {
+ return (element instanceof LeafPsiElement) && ((LeafPsiElement) element).getElementType() == JetTokens.EXCLEXCL;
+ }
+
private static JetExpression getExpressionForIntroduceCall(Editor editor, PsiFile file) {
final PsiElement elementAtCaret = file.findElementAt(editor.getCaretModel().getOffset());
if (elementAtCaret != null) {
|
d9a4fb410d6608260a597b1d72b9153c585caa5d
|
spring-framework
|
Introduce "dummy" Environment implementation--For testing purposes in which an Environment implementation is required-but a ConfigurableEnvironment is not desirable.--All methods are no-ops and return null, therefore NPEs are likely.-
|
a
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-core/src/test/java/org/springframework/core/env/DummyEnvironment.java b/spring-core/src/test/java/org/springframework/core/env/DummyEnvironment.java
new file mode 100644
index 000000000000..88d9347dfa1b
--- /dev/null
+++ b/spring-core/src/test/java/org/springframework/core/env/DummyEnvironment.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2002-2013 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.core.env;
+
+public class DummyEnvironment implements Environment {
+
+ public boolean containsProperty(String key) {
+ return false;
+ }
+
+ public String getProperty(String key) {
+ return null;
+ }
+
+ public String getProperty(String key, String defaultValue) {
+ return null;
+ }
+
+ public <T> T getProperty(String key, Class<T> targetType) {
+ return null;
+ }
+
+ public <T> T getProperty(String key, Class<T> targetType, T defaultValue) {
+ return null;
+ }
+
+ public <T> Class<T> getPropertyAsClass(String key, Class<T> targetType) {
+ return null;
+ }
+
+ public String getRequiredProperty(String key) throws IllegalStateException {
+ return null;
+ }
+
+ public <T> T getRequiredProperty(String key, Class<T> targetType)
+ throws IllegalStateException {
+ return null;
+ }
+
+ public String resolvePlaceholders(String text) {
+ return null;
+ }
+
+ public String resolveRequiredPlaceholders(String text)
+ throws IllegalArgumentException {
+ return null;
+ }
+
+ public String[] getActiveProfiles() {
+ return null;
+ }
+
+ public String[] getDefaultProfiles() {
+ return null;
+ }
+
+ public boolean acceptsProfiles(String... profiles) {
+ return false;
+ }
+
+}
diff --git a/spring-webmvc/src/test/java/org/springframework/web/servlet/DispatcherServletTests.java b/spring-webmvc/src/test/java/org/springframework/web/servlet/DispatcherServletTests.java
index 147875e4ca4a..bb2d0a089b04 100644
--- a/spring-webmvc/src/test/java/org/springframework/web/servlet/DispatcherServletTests.java
+++ b/spring-webmvc/src/test/java/org/springframework/web/servlet/DispatcherServletTests.java
@@ -16,12 +16,6 @@
package org.springframework.web.servlet;
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.instanceOf;
-import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.hamcrest.CoreMatchers.sameInstance;
-import static org.junit.Assert.assertThat;
-
import java.io.IOException;
import java.util.Locale;
@@ -36,15 +30,15 @@
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.PropertyValue;
-import org.springframework.tests.sample.beans.TestBean;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.support.DefaultMessageSourceResolvable;
import org.springframework.core.env.ConfigurableEnvironment;
-import org.springframework.core.env.StandardEnvironment;
+import org.springframework.core.env.DummyEnvironment;
import org.springframework.mock.web.test.MockHttpServletRequest;
import org.springframework.mock.web.test.MockHttpServletResponse;
import org.springframework.mock.web.test.MockServletConfig;
import org.springframework.mock.web.test.MockServletContext;
+import org.springframework.tests.sample.beans.TestBean;
import org.springframework.web.bind.EscapedErrors;
import org.springframework.web.context.ConfigurableWebEnvironment;
import org.springframework.web.context.ServletConfigAwareBean;
@@ -65,6 +59,9 @@
import org.springframework.web.servlet.view.InternalResourceViewResolver;
import org.springframework.web.util.WebUtils;
+import static org.hamcrest.CoreMatchers.*;
+import static org.junit.Assert.*;
+
/**
* @author Rod Johnson
* @author Juergen Hoeller
@@ -842,8 +839,8 @@ public void testEnvironmentOperations() {
servlet.setEnvironment(env1); // should succeed
assertThat(servlet.getEnvironment(), sameInstance(env1));
try {
- servlet.setEnvironment(new StandardEnvironment());
- fail("expected exception");
+ servlet.setEnvironment(new DummyEnvironment());
+ fail("expected IllegalArgumentException for non-configurable Environment");
}
catch (IllegalArgumentException ex) {
}
|
0ea33300e63850254f9490588b6e41042f281d0f
|
drools
|
JBRULES-3126 NPE when retracting an object with a- collection field which has been accumulated on--
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/AccumulateTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/AccumulateTest.java
index 4958c657043..8a5528e359e 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/AccumulateTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/AccumulateTest.java
@@ -18,7 +18,6 @@
import java.util.List;
import java.util.Set;
-import junit.framework.Assert;
import org.drools.Cheese;
import org.drools.Cheesery;
import org.drools.FactHandle;
@@ -1904,19 +1903,16 @@ public void testAccumulateAndRetract() {
kbuilder.add( ResourceFactory.newByteArrayResource(drl.getBytes()),
ResourceType.DRL );
if (kbuilder.hasErrors()) {
- System.err.println(kbuilder.getErrors());
- Assert.fail(kbuilder.getErrors().toString());
+ fail(kbuilder.getErrors().toString());
}
KnowledgeBase kb = KnowledgeBaseFactory.newKnowledgeBase();
kb.addKnowledgePackages(kbuilder.getKnowledgePackages());
StatefulKnowledgeSession ks = kb.newStatefulKnowledgeSession();
-
ArrayList resList = new ArrayList();
ks.setGlobal("list",resList);
-
ArrayList<String> list = new ArrayList<String>();
list.add("x");
list.add("y");
@@ -1925,7 +1921,7 @@ public void testAccumulateAndRetract() {
ks.insert(list);
ks.fireAllRules();
- Assert.assertEquals(3L, resList.get(0));
+ assertEquals(3L, resList.get(0));
}
|
bfb2bf6246c48caaca6926e9cba6fae052242939
|
restlet-framework-java
|
JAX-RS extension: - added to changes.txt, that I- switched to API 0.11 - fixed a big bug related to the determining of the- MessageBodyWriter - using org.restlet.data.Conditions.getStatus() for- javax.ws.rs.core.Request.evaluatePreconditions() instead of using an own- implementation (the Restlet method has a bug, I'm waiting for the patch to be- applied) - fixed a bug in JaxRsUriBuilder.buildFromMap*() - returning status- 500 instead of 406, if no message body writer could be found (according to a- change in the specification)--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/build/tmpl/text/changes.txt b/build/tmpl/text/changes.txt
index 31944d2089..0dfb6fac68 100644
--- a/build/tmpl/text/changes.txt
+++ b/build/tmpl/text/changes.txt
@@ -19,6 +19,8 @@ Changes log
Ricard.
- Misc
- Updated Db4O to version 7.4.58.
+ - API Enhancements
+ - Updated to JAX-RS API 0.11
- 1.1 Release candidate 1 (08/20/2008)
- Breaking change
- The logger names used by Restlet have been fully refactored
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/JaxRsRestlet.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/JaxRsRestlet.java
index 7748f95fc5..3eb1d1abb0 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/JaxRsRestlet.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/JaxRsRestlet.java
@@ -45,6 +45,7 @@
import java.util.List;
import java.util.Set;
+import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.GenericEntity;
import javax.ws.rs.core.MultivaluedMap;
@@ -393,8 +394,6 @@ private ResObjAndMeth requestMatching() throws RequestHandledException,
return method;
}
- // FIXME neues: sortierkreterium explizite RegExp vor ohne (=String)
-
/**
* Identifies the root resource class, see JAX-RS-Spec (2008-04-16), section
* 3.7.2 "Request Matching", Part 1: "Identify the root resource class"
@@ -738,7 +737,7 @@ private Representation convertToRepresentation(Object entity,
methodAnnotations = EMPTY_ANNOTATION_ARRAY;
if (entity instanceof GenericEntity) {
- GenericEntity<?> genericEntity = (GenericEntity) entity;
+ GenericEntity<?> genericEntity = (GenericEntity<?>) entity;
genericReturnType = genericEntity.getType();
entityClass = genericEntity.getRawType();
entity = genericEntity.getEntity();
@@ -749,44 +748,39 @@ private Representation convertToRepresentation(Object entity,
else
genericReturnType = null;
if (genericReturnType instanceof Class
- && ((Class) genericReturnType)
+ && ((Class<?>) genericReturnType)
.isAssignableFrom(javax.ws.rs.core.Response.class)) {
genericReturnType = entityClass;
}
}
- MessageBodyWriterSubSet mbws;
+ final MultivaluedMap<String, Object> httpResponseHeaders = new WrappedRequestForHttpHeaders(
+ tlContext.get().getResponse(), jaxRsRespHeaders);
+ final Representation repr;
+
if (entity != null) {
+ final MediaType respMediaType = determineMediaType(
+ jaxRsResponseMediaType, resourceMethod, entityClass,
+ genericReturnType);
+
+ final MessageBodyWriterSubSet mbws;
mbws = providers.writerSubSet(entityClass, genericReturnType);
if (mbws.isEmpty())
throw excHandler.noMessageBodyWriter(entityClass,
genericReturnType, methodAnnotations, null, null);
- } else {
- mbws = MessageBodyWriterSubSet.empty();
- }
- final MediaType respMediaType;
- if (jaxRsResponseMediaType != null)
- respMediaType = jaxRsResponseMediaType;
- else if (resourceMethod != null)
- respMediaType = determineMediaType(resourceMethod, mbws);
- else
- respMediaType = MediaType.TEXT_PLAIN;
- final Response response = tlContext.get().getResponse();
- MultivaluedMap<String, Object> httpResponseHeaders = new WrappedRequestForHttpHeaders(
- response, jaxRsRespHeaders);
- final Representation repr;
- if (entity == null) {
- repr = Representation.createEmpty();
- repr.setMediaType(respMediaType);
- } else {
- final MessageBodyWriter mbw;
- mbw = mbws.getBestWriter(respMediaType, accMediaTypes);
+
+ final MessageBodyWriter mbw = mbws.getBestWriter(respMediaType,
+ methodAnnotations, accMediaTypes);
if (mbw == null)
throw excHandler.noMessageBodyWriter(entityClass,
genericReturnType, methodAnnotations, respMediaType,
accMediaTypes);
repr = new JaxRsOutputRepresentation(entity, genericReturnType,
respMediaType, methodAnnotations, mbw, httpResponseHeaders);
+ } else { // entity == null
+ repr = Representation.createEmpty();
+ repr.setMediaType(determineMediaType(jaxRsResponseMediaType,
+ resourceMethod, entityClass, genericReturnType));
}
repr.setCharacterSet(getSupportedCharSet(httpResponseHeaders));
return repr;
@@ -796,26 +790,42 @@ else if (resourceMethod != null)
* Determines the MediaType for a response, see JAX-RS-Spec (2008-08-27),
* section 3.8 "Determining the MediaType of Responses"
*
+ * @param jaxRsResponseMediaType
* @param resourceMethod
* The ResourceMethod that created the entity.
+ * @param entityClass
+ * needed, if neither the resource method nor the resource
+ * class is annotated with @{@link Produces}.
+ * @param genericReturnType
+ * needed, if neither the resource method nor the resource
+ * class is annotated with @{@link Produces}.
+ * @param methodAnnotation
+ * needed, if neither the resource method nor the resource
+ * class is annotated with @{@link Produces}.
* @param mbws
* The {@link MessageBodyWriter}s, that support the class of
- * the returned entity object.
- * @return the determined {@link MediaType}
- * @throws RequestHandledException
+ * the returned entity object as generic type of the
+ * {@link MessageBodyWriter}.
+ * @return the determined {@link MediaType}. If no method is given,
+ * "text/plain" is returned.
* @throws WebApplicationException
*/
- private MediaType determineMediaType(ResourceMethod resourceMethod,
- MessageBodyWriterSubSet mbws) throws WebApplicationException {
+ private MediaType determineMediaType(MediaType jaxRsResponseMediaType,
+ ResourceMethod resourceMethod, Class<?> entityClass,
+ Type genericReturnType) throws WebApplicationException {
// 1. if the Response contains a MediaType, use it.
- // TODO wenn MediaType in Response enthalten, nimm den
+ if (jaxRsResponseMediaType != null)
+ return jaxRsResponseMediaType;
+ if (resourceMethod == null)
+ return MediaType.TEXT_PLAIN;
CallContext callContext = tlContext.get();
// 2. Gather the set of producible media types P:
// (a) + (b)
Collection<MediaType> p = resourceMethod.getProducedMimes();
// 2. (c)
if (p.isEmpty()) {
- p = mbws.getAllProducibleMediaTypes();
+ p = providers.writerSubSet(entityClass, genericReturnType)
+ .getAllProducibleMediaTypes();
// 3.
if (p.isEmpty())
// '*/*', in conjunction with 8.:
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/core/CallContext.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/core/CallContext.java
index ad43c7cf46..5ea28a2e5d 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/core/CallContext.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/core/CallContext.java
@@ -26,8 +26,15 @@
*/
package org.restlet.ext.jaxrs.internal.core;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Reader;
+import java.io.Writer;
import java.net.URI;
import java.net.URISyntaxException;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.channels.WritableByteChannel;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Arrays;
@@ -64,11 +71,9 @@
import org.restlet.data.ChallengeResponse;
import org.restlet.data.ChallengeScheme;
import org.restlet.data.CharacterSet;
-import org.restlet.data.Conditions;
import org.restlet.data.Dimension;
import org.restlet.data.Form;
import org.restlet.data.Language;
-import org.restlet.data.Method;
import org.restlet.data.Reference;
import org.restlet.data.Request;
import org.restlet.data.Status;
@@ -339,18 +344,6 @@ protected void checkChangeable() throws IllegalStateException {
}
}
- private boolean checkIfOneMatch(List<Tag> requestETags, Tag entityTag) {
- if (entityTag.isWeak()) {
- return false;
- }
- for (final Tag requestETag : requestETags) {
- if (entityTag.equals(requestETag)) {
- return true;
- }
- }
- return false;
- }
-
/**
* Creates an unmodifiable List of {@link PathSegment}s.
*
@@ -425,8 +418,82 @@ public boolean equals(Object anotherObject) {
* @see javax.ws.rs.core.Request#evaluatePreconditions(java.util.Date)
*/
public ResponseBuilder evaluatePreconditions(Date lastModified) {
- // TODO throw IllegalArgumentException if null
- return evaluatePreconditions(lastModified, null);
+ if (lastModified == null) {
+ throw new IllegalArgumentException(
+ "The last modification date must not be null");
+ }
+ return evaluatePreconditionsInternal(lastModified, null);
+ }
+
+ /**
+ * Evaluates the preconditions of the current request against the given last
+ * modified date and / or the given entity tag. This method does not check,
+ * if the arguments are not null.
+ *
+ * @param lastModified
+ * @param entityTag
+ * @return
+ * @see Request#evaluateConditions(Tag, Date)
+ */
+ private ResponseBuilder evaluatePreconditionsInternal(
+ final Date lastModified, final EntityTag entityTag) {
+ // Status status = this.request.getConditions().getStatus(
+ // this.request.getMethod(), true,
+ // Converter.toRestletTag(entityTag), lastModified);
+ Status status = this.request.getConditions().getStatus(
+ this.request.getMethod(), new Representation() {
+ // this anonymous class is a temporary solution
+ // see commented code above
+ @Override
+ public Tag getTag() {
+ return Converter.toRestletTag(entityTag);
+ }
+
+ @Override
+ public Date getModificationDate() {
+ return lastModified;
+ }
+
+ @Override
+ public ReadableByteChannel getChannel() throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Reader getReader() throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public InputStream getStream() throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void write(OutputStream outputStream) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void write(WritableByteChannel writableChannel) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void write(Writer writer) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+ });
+ if (status == null)
+ return null;
+ if (status.equals(Status.REDIRECTION_NOT_MODIFIED)) {
+ final ResponseBuilder rb = Response.notModified();
+ rb.lastModified(lastModified);
+ rb.tag(entityTag);
+ return rb;
+ } else {
+ return Response.status(STATUS_PREC_FAILED);
+ }
}
/**
@@ -465,76 +532,15 @@ public ResponseBuilder evaluatePreconditions(Date lastModified) {
*/
public ResponseBuilder evaluatePreconditions(Date lastModified,
EntityTag entityTag) {
- // TODO throw IllegalArgumentException if null
- // NICE Vorbed. werden mit Conditions.getStatus() unterstuetzt.
- if ((lastModified == null) && (entityTag == null)) {
- return null;
- }
- ResponseBuilder rb = null;
- final Method requestMethod = this.request.getMethod();
- final Conditions conditions = this.request.getConditions();
- if (lastModified != null) {
- // Header "If-Modified-Since"
- final Date modSinceCond = conditions.getModifiedSince();
- if (modSinceCond != null) {
- if (modSinceCond.after(lastModified)) {
- // the Entity was not changed
- final boolean readRequest = requestMethod
- .equals(Method.GET)
- || requestMethod.equals(Method.HEAD);
- if (readRequest) {
- rb = Response.notModified();
- rb.lastModified(lastModified);
- rb.tag(entityTag);
- } else {
- return precFailed("The entity was not modified since "
- + Util.formatDate(modSinceCond, false));
- }
- } else {
- // entity was changed -> check for other precoditions
- }
- }
- // Header "If-Unmodified-Since"
- final Date unmodSinceCond = conditions.getUnmodifiedSince();
- if (unmodSinceCond != null) {
- if (unmodSinceCond.after(lastModified)) {
- // entity was not changed -> Web Service must recalculate it
- return null;
- } else {
- // the Entity was changed
- return precFailed("The entity was modified since "
- + Util.formatDate(unmodSinceCond, false));
- }
- }
+ if (lastModified == null) {
+ throw new IllegalArgumentException(
+ "The last modification date must not be null");
}
- if (entityTag != null) {
- final Tag actualEntityTag = Converter.toRestletTag(entityTag);
- // Header "If-Match"
- final List<Tag> requestMatchETags = conditions.getMatch();
- if (!requestMatchETags.isEmpty()) {
- final boolean match = checkIfOneMatch(requestMatchETags,
- actualEntityTag);
- if (!match) {
- return precFailed("The entity does not match Entity Tag "
- + entityTag);
- }
- } else {
- // default answer to the request
- }
- // Header "If-None-Match"
- final List<Tag> requestNoneMatchETags = conditions.getNoneMatch();
- if (!requestNoneMatchETags.isEmpty()) {
- final boolean match = checkIfOneMatch(requestNoneMatchETags,
- actualEntityTag);
- if (match) {
- return precFailed("The entity matches Entity Tag "
- + entityTag);
- }
- } else {
- // default answer to the request
- }
+ if (entityTag == null) {
+ throw new IllegalArgumentException(
+ "The entity tag must not be null");
}
- return rb;
+ return evaluatePreconditionsInternal(lastModified, entityTag);
}
/**
@@ -554,8 +560,11 @@ public ResponseBuilder evaluatePreconditions(Date lastModified,
* @see javax.ws.rs.core.Request#evaluatePreconditions(javax.ws.rs.core.EntityTag)
*/
public ResponseBuilder evaluatePreconditions(EntityTag entityTag) {
- // TODO throw IllegalArgumentException if null
- return evaluatePreconditions(null, entityTag);
+ if (entityTag == null) {
+ throw new IllegalArgumentException(
+ "The entity tag must not be null");
+ }
+ return evaluatePreconditionsInternal(null, entityTag);
}
/**
@@ -1196,23 +1205,6 @@ public Iterator<String> pathSegementEncIter(PathParam pathParam) {
throw new NotYetImplementedException();
}
- /**
- * Creates a response with status 412 (Precondition Failed).
- *
- * @param entityMessage
- * Plain Text error message. Will be returned as entity.
- * @return Returns a response with status 412 (Precondition Failed) and the
- * given message as entity.
- */
- private ResponseBuilder precFailed(String entityMessage) {
- final ResponseBuilder rb = Response.status(STATUS_PREC_FAILED);
- rb.entity(entityMessage);
- rb.language(Language.ENGLISH.getName());
- rb.type(Converter.toJaxRsMediaType(
- org.restlet.data.MediaType.TEXT_PLAIN, null));
- return rb;
- }
-
/**
* Select the representation variant that best matches the request. More
* explicit variants are chosen ahead of less explicit ones. A vary header
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/core/JaxRsUriBuilder.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/core/JaxRsUriBuilder.java
index b867137924..632a5fa75a 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/core/JaxRsUriBuilder.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/core/JaxRsUriBuilder.java
@@ -216,7 +216,7 @@ public URI buildFromEncoded(Object... values)
@Override
public URI buildFromEncodedMap(Map<String, ? extends Object> values)
throws IllegalArgumentException, UriBuilderException {
- return this.buildFromMap(values, true);
+ return this.buildFromMap(values, false);
}
/**
@@ -225,7 +225,7 @@ public URI buildFromEncodedMap(Map<String, ? extends Object> values)
@Override
public URI buildFromMap(Map<String, ? extends Object> values)
throws IllegalArgumentException, UriBuilderException {
- return this.buildFromMap(values, false);
+ return this.buildFromMap(values, true);
}
/**
@@ -236,6 +236,8 @@ public URI buildFromMap(Map<String, ? extends Object> values)
*
* @param values
* a map of URI template parameter names and values
+ * @param encode
+ * true, if the value should be encoded, or false if not.
* @return the URI built from the UriBuilder
* @throws IllegalArgumentException
* if automatic encoding is disabled and a supplied value
@@ -247,7 +249,7 @@ public URI buildFromMap(Map<String, ? extends Object> values)
* @see javax.ws.rs.core.UriBuilder#build(java.util.Map)
*/
private URI buildFromMap(final Map<String, ? extends Object> values,
- final boolean encoded) throws IllegalArgumentException,
+ final boolean encode) throws IllegalArgumentException,
UriBuilderException {
final Template template = new Template(toStringWithCheck(false));
return buildUri(template.format(new Resolver<String>() {
@@ -259,7 +261,7 @@ public String resolve(String variableName) {
"The value Map must contain a value for all given Templet variables. The value for variable "
+ variableName + " is missing");
}
- return EncodeOrCheck.all(varValue.toString(), encoded);
+ return EncodeOrCheck.all(varValue.toString(), encode);
}
}));
}
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/todo/Notizen.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/todo/Notizen.java
index 04472b0a2b..79c202c301 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/todo/Notizen.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/todo/Notizen.java
@@ -39,8 +39,6 @@ public class Notizen {
// TESTEN do not decode @FormParam, @MatrixParam, @QueryParam
// TESTEN do not encode keys of Form entity
- // TODO status 500 instead of 406, if no message body writer could be found
-
// REQUEST could the implementation see, what is required to add
// after precondition evaluating into the ResponseBuilder
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/AlgorithmUtil.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/AlgorithmUtil.java
index 61287c6055..86c4310c87 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/AlgorithmUtil.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/AlgorithmUtil.java
@@ -283,7 +283,7 @@ private static List<MediaType> getConsOrProdMimes(
* <li>the number of capturing groups as a secondary key (descending
* order),</li>
* <li>the number of capturing groups with non-default regular expressions
- * (i.e. not "([ˆ/]+?)") as the tertiary key (descending order), and </li>
+ * (i.e. not "([^/]+?)") as the tertiary key (descending order), and </li>
* <li>the source of each member as quaternary key sorting those derived
* from T<sub>method</sub> ahead of those derived from T<sub>locator</sub>.</li>
* </ol>
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/EncodeOrCheck.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/EncodeOrCheck.java
index fe2b89737a..8eb84ee9cd 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/EncodeOrCheck.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/EncodeOrCheck.java
@@ -84,7 +84,7 @@ public class EncodeOrCheck {
* Checks / encodes all chars of the given char sequence.
*
* @param string
- * @param encode
+ * @param encode true, if the value should be encoded, or false if not.
* @return
* @throws IllegalArgumentException
* if encode is false and at least one character of the
@@ -673,7 +673,7 @@ public static void toHex(char toEncode, StringBuilder stb) {
*
* @param c
* @param stb
- * @param encode
+ * @param encode true, if the value should be encoded, or false if not.
* @throws IllegalArgumentException
*/
private static void toHexOrReject(char c, StringBuilder stb, boolean encode)
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/ExceptionHandler.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/ExceptionHandler.java
index 2cb25c5bc1..79c987a168 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/ExceptionHandler.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/ExceptionHandler.java
@@ -237,7 +237,7 @@ public WebApplicationException noMessageBodyWriter(
annotations.toString(); // LATER log also annotations
// NICE get as parameters the accMediaTypes and the entityClass.
// and return supported MediaTypes as entity
- throw new WebApplicationException(Status.NOT_ACCEPTABLE);
+ throw new WebApplicationException(Status.INTERNAL_SERVER_ERROR);
}
/**
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/Util.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/Util.java
index 5d63b6871a..bd9921be90 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/Util.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/util/Util.java
@@ -35,11 +35,13 @@
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
+import java.lang.reflect.GenericArrayType;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
+import java.lang.reflect.TypeVariable;
import java.nio.charset.Charset;
import java.security.AccessController;
import java.security.PrivilegedAction;
@@ -74,10 +76,12 @@
import org.restlet.ext.jaxrs.internal.exceptions.IllegalPathException;
import org.restlet.ext.jaxrs.internal.exceptions.IllegalPathOnClassException;
import org.restlet.ext.jaxrs.internal.exceptions.IllegalPathOnMethodException;
+import org.restlet.ext.jaxrs.internal.exceptions.ImplementationException;
import org.restlet.ext.jaxrs.internal.exceptions.InjectException;
import org.restlet.ext.jaxrs.internal.exceptions.JaxRsRuntimeException;
import org.restlet.ext.jaxrs.internal.exceptions.MethodInvokeException;
import org.restlet.ext.jaxrs.internal.exceptions.MissingAnnotationException;
+import org.restlet.ext.jaxrs.internal.provider.JaxbElementProvider;
import org.restlet.resource.Representation;
import org.restlet.util.DateUtils;
import org.restlet.util.Engine;
@@ -621,14 +625,26 @@ public static <K, V> V getFirstValue(Map<K, V> map)
*/
public static Class<?> getGenericClass(Class<?> clazz,
Class<?> implInterface) {
- for (Type genericType : clazz.getGenericInterfaces()) {
- if (!(genericType instanceof ParameterizedType)) {
+ return getGenericClass(clazz, implInterface, null);
+ }
+
+ private static Class<?> getGenericClass(Class<?> clazz,
+ Class<?> implInterface, Type[] gsatp) {
+ if(clazz.equals(JaxbElementProvider.class)) {
+ clazz.toString();
+ }
+ else if(clazz.equals(MultivaluedMap.class)) {
+ clazz.toString();
+ }
+ for (Type ifGenericType : clazz.getGenericInterfaces()) {
+ if (!(ifGenericType instanceof ParameterizedType)) {
continue;
}
- final ParameterizedType pt = (ParameterizedType) genericType;
+ final ParameterizedType pt = (ParameterizedType) ifGenericType;
if (!pt.getRawType().equals(implInterface))
continue;
- final Type atp = pt.getActualTypeArguments()[0];
+ final Type[] atps = pt.getActualTypeArguments();
+ final Type atp = atps[0];
if (atp instanceof Class) {
return (Class<?>) atp;
}
@@ -638,10 +654,95 @@ public static Class<?> getGenericClass(Class<?> clazz,
return (Class<?>) rawType;
}
}
+ if (atp instanceof TypeVariable<?>) {
+ TypeVariable<?> tv = (TypeVariable<?>) atp;
+ String name = tv.getName();
+ // clazz = AbstractProvider
+ // implInterface = MessageBodyReader
+ // name = "T"
+ // pt = MessageBodyReader<T>
+ for (int i = 0; i < atps.length; i++) {
+ TypeVariable<?> tv2 = (TypeVariable<?>)atps[i];
+ if (tv2.getName().equals(name)) {
+ Type gsatpn = gsatp[i];
+ if(gsatpn instanceof Class) {
+ return (Class<?>)gsatpn;
+ }
+ if(gsatpn instanceof ParameterizedType) {
+ final Type rawType = ((ParameterizedType)gsatpn).getRawType();
+ if(rawType instanceof Class)
+ return (Class<?>)rawType;
+ throw new ImplementationException("Sorry, don't know how to return the class here");
+ }
+ if(gsatpn instanceof GenericArrayType) {
+ Type genCompType = ((GenericArrayType)gsatpn).getGenericComponentType();
+ return getArrayClass(genCompType, gsatpn);
+ }
+ //if(gsatpn instanceof TypeVariable) {
+ // TypeVariable<Class<?>> tvn = (TypeVariable)gsatpn;
+ // Class<?> cl = tvn.getGenericDeclaration();
+ // Type[] boulds = tvn.getBounds();
+ // cl.toString();
+ //}
+ // throw new ImplementationException("Sorry, could not handle a "+gsatpn.getClass());
+ }
+ }
+ }
}
+ Class<?> superClass = clazz.getSuperclass();
+ Type genericSuperClass = clazz.getGenericSuperclass();
+ if(genericSuperClass instanceof Class) {
+ return null;
+ }
+ if(gsatp == null) {
+ // LATER this is a hack
+ gsatp = ((ParameterizedType) genericSuperClass)
+ .getActualTypeArguments();
+ }
+ if (superClass != null)
+ return getGenericClass(superClass, implInterface, gsatp);
return null;
}
+ /**
+ * @param genCompType
+ * @param forMessage
+ * @throws NegativeArraySizeException
+ * @throws ImplementationException
+ */
+ private static Class<?> getArrayClass(Type genCompType, Type forMessage)
+ throws NegativeArraySizeException, ImplementationException {
+ if(genCompType.equals(Byte.TYPE)) {
+ return (new byte[0]).getClass();
+ }
+ if(genCompType.equals(Short.TYPE)) {
+ return (new short[0]).getClass();
+ }
+ if(genCompType.equals(Integer.TYPE)) {
+ return (new int[0]).getClass();
+ }
+ if(genCompType.equals(Long.TYPE)) {
+ return (new long[0]).getClass();
+ }
+ if(genCompType.equals(Float.TYPE)) {
+ return (new float[0]).getClass();
+ }
+ if(genCompType.equals(Double.TYPE)) {
+ return (new double[0]).getClass();
+ }
+ if(genCompType.equals(Character.TYPE)) {
+ return (new char[0]).getClass();
+ }
+ if(genCompType.equals(Boolean.TYPE)) {
+ return (new boolean[0]).getClass();
+ }
+ if(genCompType instanceof Class) {
+ return Array.newInstance((Class<?>)genCompType, 0).getClass();
+ }
+ throw new ImplementationException("Sorry, could not handle a "+forMessage.getClass());
+ // LATER could not handle all classes
+ }
+
/**
* Example: in List<String< -> out: String.class
*
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/AbstractProviderWrapper.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/AbstractProviderWrapper.java
index 2a07d4f79e..74b4ac4141 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/AbstractProviderWrapper.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/AbstractProviderWrapper.java
@@ -37,6 +37,7 @@
import org.restlet.data.MediaType;
import org.restlet.ext.jaxrs.internal.util.Converter;
+import org.restlet.ext.jaxrs.internal.util.Util;
import org.restlet.ext.jaxrs.internal.wrappers.WrapperUtil;
/**
@@ -53,6 +54,10 @@ abstract class AbstractProviderWrapper implements ProviderWrapper {
private final List<org.restlet.data.MediaType> producedMimes;
+ private final Class<?> genericMbrType;
+
+ private final Class<?> genericMbwType;
+
/**
* Creates a new wrapper for a Provider and initializes the provider. If the
* given class is not a provider, an {@link IllegalArgumentException} is
@@ -83,6 +88,12 @@ abstract class AbstractProviderWrapper implements ProviderWrapper {
} else {
this.producedMimes = Collections.singletonList(MediaType.ALL);
}
+
+ this.genericMbrType = Util.getGenericClass(jaxRsProviderClass,
+ javax.ws.rs.ext.MessageBodyReader.class);
+ this.genericMbwType = Util.getGenericClass(jaxRsProviderClass,
+ javax.ws.rs.ext.MessageBodyWriter.class);
+ // LATER use Type instead of Class here
}
@Override
@@ -170,8 +181,17 @@ public List<MediaType> getProducedMimes() {
* java.lang.reflect.Type)
*/
public boolean supportsWrite(Class<?> entityClass, Type genericType) {
- // TODO AbstractProviderWrapper.supportsWrite(Class)
- return true;
+ if (entityClass == null) {
+ return false;
+ }
+ if (genericType == null) {
+ // LATER use Type instead of Class
+ }
+ if(this.genericMbwType == null) {
+ return false;
+ }
+ final boolean supportsWrite = this.genericMbwType.isAssignableFrom(entityClass);
+ return supportsWrite;
}
/**
@@ -187,8 +207,16 @@ public boolean supportsWrite(Class<?> entityClass, Type genericType) {
* @see MessageBodyReader#supportsRead(Class, Type)
*/
public boolean supportsRead(Class<?> entityClass, Type genericType) {
- // TODO AbstractProviderWrapper.supportsRead(Class)
- return true;
+ if (entityClass == null) {
+ return false;
+ }
+ if (genericType == null) {
+ // LATER use Type instead of Class
+ }
+ if(this.genericMbrType == null) {
+ return false;
+ }
+ return this.genericMbrType.isAssignableFrom(entityClass);
}
/**
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/JaxRsProviders.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/JaxRsProviders.java
index e08f7431a8..78a7299644 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/JaxRsProviders.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/JaxRsProviders.java
@@ -462,26 +462,29 @@ private void remove(ProviderWrapper provider) {
}
/**
- * Returns a Collection of {@link MessageBodyWriter}s, that support the
- * given entityClass.
+ * Returns a Collection of {@link MessageBodyWriter}s, which generic type
+ * supports the given entityClass.
*
* @param entityClass
* @param genericType
* may be null
+ * @param annotations
+ * @param mediaType
* @return
* @see javax.ws.rs.ext.MessageBodyWriter#isWriteable(Class, Type,
* Annotation[])
*/
public MessageBodyWriterSubSet writerSubSet(Class<?> entityClass,
Type genericType) {
- // NICE optimization: may be cached for speed.
final List<MessageBodyWriter> mbws = new ArrayList<MessageBodyWriter>();
for (ProviderWrapper mbww : this.messageBodyWriterWrappers) {
MessageBodyWriter mbw = mbww.getInitializedWriter();
- if (mbw.supportsWrite(entityClass, genericType))
+ if (mbw.supportsWrite(entityClass, genericType)) {
mbws.add(mbw);
+ }
}
- return new MessageBodyWriterSubSet(mbws);
+ // NICE optimization: may be cached for speed.
+ return new MessageBodyWriterSubSet(mbws, entityClass, genericType);
}
/**
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyReader.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyReader.java
index f4de685b0d..20cfe77fd0 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyReader.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyReader.java
@@ -29,8 +29,8 @@
import java.io.IOException;
import java.io.InputStream;
import java.lang.annotation.Annotation;
-import java.lang.reflect.Type;
import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Type;
import java.util.List;
import javax.ws.rs.WebApplicationException;
@@ -40,9 +40,10 @@
import org.restlet.data.MediaType;
/**
- * Class to wrap an initialized {@link javax.ws.rs.ext.MessageBodyWriter}
+ * Class to wrap an initialized {@link javax.ws.rs.ext.MessageBodyReader}
*
* @author Stephan Koops
+ * @see javax.ws.rs.ext.MessageBodyReader
*/
public interface MessageBodyReader {
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyWriter.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyWriter.java
index 016f9ef0b4..1decd6fd7b 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyWriter.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyWriter.java
@@ -42,6 +42,7 @@
* {@link javax.ws.rs.ext.MessageBodyWriter}
*
* @author Stephan Koops
+ * @see javax.ws.rs.ext.MessageBodyWriter
*/
public interface MessageBodyWriter {
@@ -51,7 +52,7 @@ public interface MessageBodyWriter {
* @param genericType
* @param annotations
* @param mediaType
- * TODO
+ * The JAX-RS MediaType
* @return
* @see javax.ws.rs.ext.MessageBodyWriter#isWriteable(Class, Type,
* Annotation[])
@@ -67,13 +68,10 @@ public boolean isWriteable(Class<?> type, Type genericType,
* @param t
* the instance to write
* @param type
- * TODO
* @param genericType
- * TODO
* @param annotations
- * TODO
* @param mediaType
- * TODO
+ * The Restlet MediaType
* @return length in bytes or -1 if the length cannot be determined in
* advance
* @see javax.ws.rs.ext.MessageBodyWriter#getSize(Object, Class, Type,
@@ -88,6 +86,7 @@ public long getSize(Object t, Class<?> type, Type genericType,
* @param genericType
* @param annotations
* @param mediaType
+ * The Restlet MediaType
* @param httpHeaders
* @param entityStream
* @throws IOException
@@ -113,8 +112,8 @@ public void writeTo(Object object, Class<?> type, Type genericType,
* Returns the list of produced {@link MediaType}s of the wrapped
* {@link javax.ws.rs.ext.MessageBodyWriter}.
*
- * @return List of produced {@link MediaType}s. If the entity provider is
- * not annotated with @ {@link javax.ws.rs.Produces}, '*<!---->/*'
+ * @return List of produced Restlet {@link MediaType}s. If the entity
+ * provider is not annotated with @ {@link javax.ws.rs.Produces}, '*<!---->/*'
* is returned.
*/
public List<MediaType> getProducedMimes();
@@ -124,7 +123,7 @@ public void writeTo(Object object, Class<?> type, Type genericType,
* given {@link MediaType}s.
*
* @param mediaTypes
- * the {@link MediaType}s
+ * the Restlet {@link MediaType}s
* @return true, if at least one of the requested {@link MediaType}s is
* supported, otherwise false.
*/
@@ -135,7 +134,7 @@ public void writeTo(Object object, Class<?> type, Type genericType,
* {@link MediaType}.
*
* @param mediaType
- * the {@link MediaType}
+ * the Restlet {@link MediaType}
* @return true, if the requested {@link MediaType} is supported, otherwise
* false.
*/
diff --git a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyWriterSubSet.java b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyWriterSubSet.java
index 906af4e7ec..56b89f688e 100644
--- a/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyWriterSubSet.java
+++ b/modules/org.restlet.ext.jaxrs_0.11/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyWriterSubSet.java
@@ -26,11 +26,14 @@
*/
package org.restlet.ext.jaxrs.internal.wrappers.provider;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.restlet.data.MediaType;
+import org.restlet.ext.jaxrs.internal.util.Converter;
import org.restlet.ext.jaxrs.internal.util.SortedMetadata;
/**
@@ -41,7 +44,7 @@
public class MessageBodyWriterSubSet {
private static final MessageBodyWriterSubSet EMPTY = new MessageBodyWriterSubSet(
- new ArrayList<MessageBodyWriter>());
+ new ArrayList<MessageBodyWriter>(), null, null);
/**
* @return
@@ -50,10 +53,27 @@ public static MessageBodyWriterSubSet empty() {
return EMPTY;
}
+ /**
+ * The class supported by the contained message body writers, given by the
+ * type parameter of the {@link javax.ws.rs.ext.MessageBodyWriter}. Could
+ * be {@code null}.
+ */
+ private final Class<?> type;
+
+ /**
+ * The type supported by the contained message body writers, given by the
+ * type parameter of the {@link javax.ws.rs.ext.MessageBodyWriter}. Could
+ * be {@code null}.
+ */
+ private final Type genericType;
+
private final List<MessageBodyWriter> mbws;
- MessageBodyWriterSubSet(List<MessageBodyWriter> mbws) {
+ MessageBodyWriterSubSet(List<MessageBodyWriter> mbws, final Class<?> type,
+ final Type genericType) {
this.mbws = mbws;
+ this.genericType = genericType;
+ this.type = type;
}
/**
@@ -61,7 +81,8 @@ public static MessageBodyWriterSubSet empty() {
*
* @return a list of all producible media types. If this set is not empty,
* this result is not empty. '*<!---->/*' is returned for a message
- * body writer with no @{@link javax.ws.rs.Produces} annotation.
+ * body writer with no @{@link javax.ws.rs.Produces}
+ * annotation.
*/
public Collection<MediaType> getAllProducibleMediaTypes() {
final List<MediaType> p = new ArrayList<MediaType>();
@@ -76,22 +97,27 @@ public Collection<MediaType> getAllProducibleMediaTypes() {
* types of the response method and of the accepted {@link MediaType}s.
*
* @param determinedResponseMediaType
- * The {@link MediaType}s of the response, declared by the
- * resource methods or given by the
- * {@link javax.ws.rs.core.Response}.
+ * The {@link MediaType}s of the response, declared by the
+ * resource methods or given by the
+ * {@link javax.ws.rs.core.Response}.
+ * @param annotations
+ * TODO
* @param accMediaTypes
- * the accepted media types.
+ * the accepted media types.
* @return A {@link MessageBodyWriter} that best matches the given accepted.
* Returns null, if no adequate {@link MessageBodyWriter} could be
* found in this set.
*/
public MessageBodyWriter getBestWriter(
- MediaType determinedResponseMediaType,
+ MediaType determinedResponseMediaType, Annotation[] annotations,
SortedMetadata<MediaType> accMediaTypes) {
final List<MessageBodyWriter> mbws = new ArrayList<MessageBodyWriter>();
for (final MessageBodyWriter mbw : this.mbws) {
if (mbw.supportsWrite(determinedResponseMediaType)) {
- mbws.add(mbw);
+ if (mbw.isWriteable(type, genericType, annotations, Converter
+ .toJaxRsMediaType(determinedResponseMediaType))) {
+ mbws.add(mbw);
+ }
}
}
for (final Iterable<MediaType> amts : accMediaTypes.listOfColls()) {
diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/NoProviderTest.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/NoProviderTest.java
index ed88f10e95..4d8b3e7eab 100644
--- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/NoProviderTest.java
+++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/NoProviderTest.java
@@ -44,11 +44,11 @@ protected Class<?> getRootResourceClass() {
public void testNoMediaType() throws Exception {
final Response response = get("no-mbw");
- assertEquals(Status.CLIENT_ERROR_NOT_ACCEPTABLE, response.getStatus());
+ assertEquals(Status.SERVER_ERROR_INTERNAL, response.getStatus());
}
public void testTextPlain() throws Exception {
final Response response = get("text-plain");
- assertEquals(Status.CLIENT_ERROR_NOT_ACCEPTABLE, response.getStatus());
+ assertEquals(Status.SERVER_ERROR_INTERNAL, response.getStatus());
}
}
\ No newline at end of file
diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ProviderTest.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ProviderTest.java
index 8ec4f0df86..c177793617 100644
--- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ProviderTest.java
+++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ProviderTest.java
@@ -224,6 +224,7 @@ public void testInputStreamPost() throws Exception {
assertEquals("big test", entity.getText());
}
+ /** @see ProviderTestService#jaxbElementGet() */
public void testJaxbElementGet() throws Exception {
getAndCheckJaxb("jaxbElement");
}
@@ -297,6 +298,7 @@ public void testJaxbPost() throws Exception {
postAndCheckXml("jaxb");
}
+ /** @see ProviderTestService#mMapGet() */
public void testMultivaluedMapGet() throws Exception {
final Response response = get("MultivaluedMap");
assertEquals(Status.SUCCESS_OK, response.getStatus());
@@ -304,6 +306,7 @@ public void testMultivaluedMapGet() throws Exception {
assertEquals("lastname=Merkel&firstname=Angela", entity.getText());
}
+ /** @see ProviderTestService#mMapPost(javax.ws.rs.core.MultivaluedMap) */
public void testMultivaluedMapPost() throws Exception {
final Response response = post("MultivaluedMap", createForm()
.getWebRepresentation());
diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/RequestTest.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/RequestTest.java
index 4bb0c5e461..39f5953ab1 100644
--- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/RequestTest.java
+++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/RequestTest.java
@@ -70,10 +70,16 @@ public class RequestTest extends JaxRsTestCase {
* @see EvaluatePreconditionService#getLastModificationDateFromDatastore()
*/
@SuppressWarnings("deprecation")
- public static final Date BEFORE = new Date(2007 - 1900, 11, 31); //2007-12-31
+ public static final Date BEFORE = new Date(2007 - 1900, 11, 31); // 2007-12-31
private static final Status PREC_FAILED = Status.CLIENT_ERROR_PRECONDITION_FAILED;
+ /**
+ * Conditions.getStatus() has a bug. I've send a patch; it's not yet
+ * patchNotApplied to the trunk, while writing this.
+ */
+ private static boolean CONDTIONS_GETSTATUS_PATCH_NOT_APPLIED = true;
+
/**
* @param modifiedSince
* @param entityTag
@@ -132,13 +138,12 @@ public void testDateAndEntityTag2Get() throws Exception {
}
public void testDateAndEntityTag2Put() throws Exception {
+ if (CONDTIONS_GETSTATUS_PATCH_NOT_APPLIED)
+ return;
final Conditions conditions = createConditions(AFTER,
getDatastoreETag());
final Response response = put("date", null, conditions);
assertEquals(PREC_FAILED, response.getStatus());
- assertTrue("Entity must contain \"was not modified\"", response
- .getEntity().getText().contains(
- "The entity was not modified since"));
}
public void testDateAndEntityTag3Get() throws Exception {
@@ -146,13 +151,6 @@ public void testDateAndEntityTag3Get() throws Exception {
"shkhsdk"));
final Response response = get("date", conditions);
assertEquals(PREC_FAILED, response.getStatus());
- final String entityText = response.getEntity().getText();
- assertTrue(
- "Entity must contain \"was not modified\" or \"does not match Entity Tag\", but is \""
- + entityText + "\"",
- entityText.contains("The entity was not modified since")
- || entityText
- .contains("The entity does not match Entity Tag"));
}
public void testDateAndEntityTag3Put() throws Exception {
@@ -160,13 +158,6 @@ public void testDateAndEntityTag3Put() throws Exception {
"shkhsdk"));
final Response response = put("date", null, conditions);
assertEquals(PREC_FAILED, response.getStatus());
- final String entityText = response.getEntity().getText();
- assertTrue(
- "Entity must contain \"was not modified\" or \"does not match Entity Tag\", but is \""
- + entityText + "\"",
- entityText.contains("The entity was not modified since")
- || entityText
- .contains("The entity does not match Entity Tag"));
}
public void testDateAndEntityTag4Get() throws Exception {
@@ -174,13 +165,6 @@ public void testDateAndEntityTag4Get() throws Exception {
new Tag("shkhsdk"));
final Response response = get("date", conditions);
assertEquals(PREC_FAILED, response.getStatus());
- final String entityText = response.getEntity().getText();
- assertTrue(
- "Entity must contain \"was not modified\" or \"does not match Entity Tag\", but is \""
- + entityText + "\"",
- entityText.contains("The entity was not modified since")
- || entityText
- .contains("The entity does not match Entity Tag"));
}
public void testDateAndEntityTag4Put() throws Exception {
@@ -188,13 +172,6 @@ public void testDateAndEntityTag4Put() throws Exception {
new Tag("shkhsdk"));
final Response response = put("date", null, conditions);
assertEquals(PREC_FAILED, response.getStatus());
- final String entityText = response.getEntity().getText();
- assertTrue(
- "Entity must contain \"was not modified\" or \"does not match Entity Tag\", but is \""
- + entityText + "\"",
- entityText.contains("The entity was not modified since")
- || entityText
- .contains("The entity does not match Entity Tag"));
}
public void testGetDateNotModified() throws Exception {
@@ -227,19 +204,13 @@ public void testGetEntityTagMatch() throws Exception {
conditions.setMatch(TestUtils.createList(new Tag("affer")));
response = get("date", conditions);
assertEquals(PREC_FAILED, response.getStatus());
- assertTrue("Entity must contain \"does not match Entity Tag\"",
- response.getEntity().getText().contains(
- "The entity does not match Entity Tag"));
}
public void testGetEntityTagNoneMatch() throws Exception {
Conditions conditions = new Conditions();
conditions.setNoneMatch(TestUtils.createList(getDatastoreETag()));
Response response = get("date", conditions);
- assertEquals(PREC_FAILED, response.getStatus());
- assertTrue("Entity must contain \"matches Entity Tag\"", response
- .getEntity().getText()
- .contains("The entity matches Entity Tag"));
+ assertEquals(Status.REDIRECTION_NOT_MODIFIED, response.getStatus());
conditions = new Conditions();
conditions.setNoneMatch(TestUtils.createList(new Tag("affer")));
@@ -287,8 +258,6 @@ public void testGetUnmodifiedSince() throws Exception {
conditions.setUnmodifiedSince(BEFORE);
response = get("date", conditions);
assertEquals(PREC_FAILED, response.getStatus());
- assertTrue("Entity must contain \"was modified\"", response.getEntity()
- .getText().contains("The entity was modified since"));
// NICE testen, was bei ungultigem Datum passiert:
// If-Unmodified-Since-Header ignorieren.
@@ -312,6 +281,8 @@ public void testOptions() {
* @throws Exception
*/
public void testPutModifiedSince() throws Exception {
+ if (CONDTIONS_GETSTATUS_PATCH_NOT_APPLIED)
+ return;
Conditions conditions = new Conditions();
conditions.setModifiedSince(BEFORE);
Response response = put("date", null, conditions);
@@ -321,9 +292,6 @@ public void testPutModifiedSince() throws Exception {
conditions.setModifiedSince(AFTER);
response = put("date", null, conditions);
assertEquals(PREC_FAILED, response.getStatus());
- assertTrue("Entity must contain \"was not modified\"", response
- .getEntity().getText().contains(
- "The entity was not modified since"));
}
public void testPutUnmodifiedSince() throws Exception {
@@ -336,9 +304,6 @@ public void testPutUnmodifiedSince() throws Exception {
conditions.setUnmodifiedSince(BEFORE);
response = put("date", null, conditions);
assertEquals(PREC_FAILED, response.getStatus());
- final String respEntity = response.getEntity().getText();
- assertTrue("Entity must contain \"was not modified\"", respEntity
- .contains("The entity was modified since"));
}
public void testSelectVariant() {
|
6afc4dfbbaae4b9492c426a38cef1ba035a3b2c5
|
camel
|
CAMEL-4077: Sanitiy check route in XML DSL, that- there is input and outputs in routes.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1134300 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/model/RouteDefinitionHelper.java b/camel-core/src/main/java/org/apache/camel/model/RouteDefinitionHelper.java
index d3d20b5ac4efe..3ce8d0879cc5e 100644
--- a/camel-core/src/main/java/org/apache/camel/model/RouteDefinitionHelper.java
+++ b/camel-core/src/main/java/org/apache/camel/model/RouteDefinitionHelper.java
@@ -23,6 +23,7 @@
import org.apache.camel.builder.ErrorHandlerBuilder;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.EndpointHelper;
+import org.apache.camel.util.ObjectHelper;
/**
* Helper for {@link RouteDefinition}
@@ -134,6 +135,32 @@ public static void prepareRoute(CamelContext context, RouteDefinition route,
route.getOutputs().addAll(0, upper);
}
+ /**
+ * Sanity check the route, that it has input(s) and outputs.
+ *
+ * @param route the route
+ * @throws IllegalArgumentException is thrown if the route is invalid
+ */
+ public static void sanityCheckRoute(RouteDefinition route) {
+ ObjectHelper.notNull(route, "route");
+
+ if (route.getInputs() == null || route.getInputs().isEmpty()) {
+ String msg = "Route has no inputs: " + route;
+ if (route.getId() != null) {
+ msg = "Route " + route.getId() + " has no inputs: " + route;
+ }
+ throw new IllegalArgumentException(msg);
+ }
+
+ if (route.getOutputs() == null || route.getOutputs().isEmpty()) {
+ String msg = "Route has no outputs: " + route;
+ if (route.getId() != null) {
+ msg = "Route " + route.getId() + " has no outputs: " + route;
+ }
+ throw new IllegalArgumentException(msg);
+ }
+ }
+
private static void initParentAndErrorHandlerBuilder(CamelContext context, RouteDefinition route,
List<ProcessorDefinition> abstracts, List<OnExceptionDefinition> onExceptions) {
diff --git a/components/camel-core-xml/src/main/java/org/apache/camel/core/xml/AbstractCamelContextFactoryBean.java b/components/camel-core-xml/src/main/java/org/apache/camel/core/xml/AbstractCamelContextFactoryBean.java
index 9c427903e6251..4d494153c64c7 100644
--- a/components/camel-core-xml/src/main/java/org/apache/camel/core/xml/AbstractCamelContextFactoryBean.java
+++ b/components/camel-core-xml/src/main/java/org/apache/camel/core/xml/AbstractCamelContextFactoryBean.java
@@ -313,6 +313,9 @@ public void afterPropertiesSet() throws Exception {
*/
private void prepareRoutes() {
for (RouteDefinition route : getRoutes()) {
+ // sanity check first as the route is created using XML
+ RouteDefinitionHelper.sanityCheckRoute(route);
+
// leverage logic from route definition helper to prepare the route
RouteDefinitionHelper.prepareRoute(getContext(), route, getOnExceptions(), getIntercepts(), getInterceptFroms(),
getInterceptSendToEndpoints(), getOnCompletions());
diff --git a/components/camel-spring/src/test/java/org/apache/camel/spring/config/SpringRouteNoFromTest.java b/components/camel-spring/src/test/java/org/apache/camel/spring/config/SpringRouteNoFromTest.java
new file mode 100644
index 0000000000000..b271528e9fd91
--- /dev/null
+++ b/components/camel-spring/src/test/java/org/apache/camel/spring/config/SpringRouteNoFromTest.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.spring.config;
+
+import org.apache.camel.spring.SpringTestSupport;
+import org.springframework.context.support.AbstractXmlApplicationContext;
+import org.springframework.context.support.ClassPathXmlApplicationContext;
+
+/**
+ *
+ */
+public class SpringRouteNoFromTest extends SpringTestSupport {
+
+ @Override
+ protected void setUp() throws Exception {
+ createApplicationContext();
+ }
+
+ public void testRouteNoFrom() {
+ // noop
+ }
+
+ @Override
+ protected AbstractXmlApplicationContext createApplicationContext() {
+ try {
+ return new ClassPathXmlApplicationContext("org/apache/camel/spring/config/SpringRouteNoFromTest.xml");
+ } catch (Exception e) {
+ IllegalArgumentException iae = (IllegalArgumentException) e.getCause().getCause();
+ assertEquals("Route myRoute has no inputs: Route[[] -> [To[mock:result]]]", iae.getMessage());
+ return null;
+ }
+ }
+}
diff --git a/components/camel-spring/src/test/java/org/apache/camel/spring/config/SpringRouteNoOutputTest.java b/components/camel-spring/src/test/java/org/apache/camel/spring/config/SpringRouteNoOutputTest.java
new file mode 100644
index 0000000000000..8a4e7792e7918
--- /dev/null
+++ b/components/camel-spring/src/test/java/org/apache/camel/spring/config/SpringRouteNoOutputTest.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.spring.config;
+
+import org.apache.camel.spring.SpringTestSupport;
+import org.springframework.context.support.AbstractXmlApplicationContext;
+import org.springframework.context.support.ClassPathXmlApplicationContext;
+
+/**
+ *
+ */
+public class SpringRouteNoOutputTest extends SpringTestSupport {
+
+ @Override
+ protected void setUp() throws Exception {
+ createApplicationContext();
+ }
+
+ public void testRouteNoOutput() {
+ // noop
+ }
+
+ @Override
+ protected AbstractXmlApplicationContext createApplicationContext() {
+ try {
+ return new ClassPathXmlApplicationContext("org/apache/camel/spring/config/SpringRouteNoOutputTest.xml");
+ } catch (Exception e) {
+ IllegalArgumentException iae = (IllegalArgumentException) e.getCause().getCause();
+ assertEquals("Route myRoute has no outputs: Route[[From[direct:start]] -> []]", iae.getMessage());
+ return null;
+ }
+ }
+}
diff --git a/components/camel-spring/src/test/resources/org/apache/camel/spring/config/SpringRouteNoFromTest.xml b/components/camel-spring/src/test/resources/org/apache/camel/spring/config/SpringRouteNoFromTest.xml
new file mode 100644
index 0000000000000..9042234cc2c50
--- /dev/null
+++ b/components/camel-spring/src/test/resources/org/apache/camel/spring/config/SpringRouteNoFromTest.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="
+ http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd
+ ">
+
+ <camelContext xmlns="http://camel.apache.org/schema/spring">
+ <!-- this route has no from -->
+ <route id="myRoute">
+ <to uri="mock:result"/>
+ </route>
+ </camelContext>
+
+</beans>
diff --git a/components/camel-spring/src/test/resources/org/apache/camel/spring/config/SpringRouteNoOutputTest.xml b/components/camel-spring/src/test/resources/org/apache/camel/spring/config/SpringRouteNoOutputTest.xml
new file mode 100644
index 0000000000000..caa5f7649645f
--- /dev/null
+++ b/components/camel-spring/src/test/resources/org/apache/camel/spring/config/SpringRouteNoOutputTest.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="
+ http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
+ http://camel.apache.org/schema/spring http://camel.apache.org/schema/spring/camel-spring.xsd
+ ">
+
+ <camelContext xmlns="http://camel.apache.org/schema/spring">
+ <!-- this route has no outputs -->
+ <route id="myRoute">
+ <from uri="direct:start"/>
+ </route>
+ </camelContext>
+
+</beans>
|
5b500c684ff416cf88937999b1205dd66d48bc67
|
orientdb
|
Removed old code to handle temporary records in- indexes--
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
index ec82eb5bed3..be590f0944c 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
@@ -17,7 +17,6 @@
import java.util.Collection;
import java.util.Collections;
-import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Map.Entry;
@@ -62,7 +61,6 @@ public abstract class OIndexMVRBTreeAbstract extends OSharedResource implements
protected Set<String> clustersToIndex = new LinkedHashSet<String>();
protected OIndexCallback callback;
protected boolean automatic;
- protected Set<Object> tempItems = new HashSet<Object>();
@ODocumentInstance
protected ODocument configuration;
@@ -533,29 +531,7 @@ public void onAfterTxRollback(ODatabase iDatabase) {
public void onBeforeTxCommit(ODatabase iDatabase) {
}
- /**
- * Reset documents into the set to update its hashcode.
- */
public void onAfterTxCommit(ODatabase iDatabase) {
- acquireExclusiveLock();
-
- try {
- if (tempItems.size() > 0) {
- for (Object key : tempItems) {
- Set<OIdentifiable> set = map.get(key);
- if (set != null) {
- // RE-ADD ALL THE ITEM TO UPDATE THE HASHCODE (CHANGED AFTER SAVE+COMMIT)
- final ORecordLazySet newSet = new ORecordLazySet(configuration.getDatabase());
- newSet.addAll(set);
- map.put(key, newSet);
- }
- }
- }
- tempItems.clear();
-
- } finally {
- releaseExclusiveLock();
- }
}
public void onClose(ODatabase iDatabase) {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexNotUnique.java b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexNotUnique.java
index 34802bfeb47..33324ea2669 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexNotUnique.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexNotUnique.java
@@ -43,9 +43,6 @@ public OIndex put(final Object iKey, final ORecord<?> iSingleValue) {
if (!iSingleValue.getIdentity().isValid())
iSingleValue.save();
- if (iSingleValue.getIdentity().isTemporary())
- tempItems.add(iKey);
-
values.add(iSingleValue);
map.put(iKey, values);
|
35135dccec1710e50329b5c4a928ea67b4ae0464
|
orientdb
|
Fixed issue on empty links--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLCreateLink.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLCreateLink.java
index 98a9812434d..fc79a70cc0c 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLCreateLink.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLCreateLink.java
@@ -139,28 +139,28 @@ public Object execute(final Object... iArgs) {
// SEARCH THE DESTINATION RECORD
if (value instanceof String) {
if (((String) value).length() == 0)
- continue;
-
- value = "'" + value + "'";
+ value = null;
+ else {
+ value = "'" + value + "'";
+ result = database.command(new OSQLSynchQuery<ODocument>(cmd + value)).execute();
+
+ if (result == null || result.size() == 0)
+ // throw new OCommandExecutionException("Can't create link because the destination record was not found in class '"
+ // + destClass.getName() + "' and with the field '" + destField + "' equals to " + value);
+ value = null;
+ else if (result.size() > 1)
+ throw new OCommandExecutionException("Can't create link because multiple records was found in class '"
+ + destClass.getName() + "' with value " + value + " in field '" + destField + "'");
+ else
+ value = result.get(0);
+ }
+
+ // SET THE REFERENCE
+ doc.field(sourceField, value);
+ doc.save();
+
+ total++;
}
-
- result = database.command(new OSQLSynchQuery<ODocument>(cmd + value)).execute();
-
- if (result == null || result.size() == 0)
- // throw new OCommandExecutionException("Can't create link because the destination record was not found in class '"
- // + destClass.getName() + "' and with the field '" + destField + "' equals to " + value);
- value = null;
- else if (result.size() > 1)
- throw new OCommandExecutionException("Can't create link because multiple records was found in class '"
- + destClass.getName() + "' with value " + value + " in field '" + destField + "'");
- else
- value = result.get(0);
-
- // SET THE REFERENCE
- doc.field(sourceField, value);
- doc.save();
-
- total++;
}
}
}
|
a4870e1f053d17b9e9cbb64d6a9b8774a8815863
|
restlet-framework-java
|
- Fixed issues in Grizzly code.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/GrizzlyServerCall.java b/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/GrizzlyServerCall.java
index 43b3d1bc35..af977017db 100644
--- a/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/GrizzlyServerCall.java
+++ b/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/GrizzlyServerCall.java
@@ -72,6 +72,21 @@ public class GrizzlyServerCall extends HttpServerCall {
public GrizzlyServerCall(Server server, ByteBuffer byteBuffer,
SelectionKey key, boolean confidential) {
super(server);
+ init(byteBuffer, key, confidential);
+ }
+
+ /**
+ * Initialize the call.
+ *
+ * @param byteBuffer
+ * The NIO byte buffer.
+ * @param key
+ * The NIO selection key.
+ * @param confidential
+ * Indicates if the call is confidential.
+ */
+ public void init(ByteBuffer byteBuffer, SelectionKey key,
+ boolean confidential) {
setConfidential(confidential);
try {
diff --git a/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/GrizzlyServerHelper.java b/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/GrizzlyServerHelper.java
index 80854b5da6..4f32e03191 100644
--- a/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/GrizzlyServerHelper.java
+++ b/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/GrizzlyServerHelper.java
@@ -56,7 +56,7 @@ public void start() throws Exception {
configure(this.controller);
}
- getLogger().info("Starting the Grizzly HTTP server");
+ getLogger().info("Starting the Grizzly " + getProtocols() + " server");
final Controller controller = this.controller;
new Thread() {
public void run() {
@@ -84,7 +84,8 @@ public void stop() throws Exception {
super.stop();
if (this.controller != null) {
- getLogger().info("Stopping the Grizzly HTTP server");
+ getLogger().info(
+ "Stopping the Grizzly " + getProtocols() + " server");
this.controller.stop();
}
}
diff --git a/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/HttpParserFilter.java b/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/HttpParserFilter.java
index 389ca49e68..c14f44e396 100644
--- a/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/HttpParserFilter.java
+++ b/modules/com.noelios.restlet.ext.grizzly_1.5/src/com/noelios/restlet/ext/grizzly/HttpParserFilter.java
@@ -66,7 +66,11 @@ public boolean execute(Context context) throws IOException {
if (serverCall == null) {
serverCall = new GrizzlyServerCall(this.helper.getServer(),
byteBuffer, key, (helper instanceof HttpsServerHelper));
+ } else {
+ serverCall.init(byteBuffer, key,
+ (helper instanceof HttpsServerHelper));
}
+
boolean keepAlive = false;
// Handle the call
|
855142e35dc21c03b1c9bb0a33992e283a0b8791
|
kotlin
|
move test output to one level down--for ReadClassDataTest and CompileJavaAgainstKotlinTest-
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/tests/org/jetbrains/jet/CompileJavaAgainstKotlinTest.java b/compiler/tests/org/jetbrains/jet/CompileJavaAgainstKotlinTest.java
index 8bb9e576bc676..b856970872da2 100644
--- a/compiler/tests/org/jetbrains/jet/CompileJavaAgainstKotlinTest.java
+++ b/compiler/tests/org/jetbrains/jet/CompileJavaAgainstKotlinTest.java
@@ -53,7 +53,7 @@ public String getName() {
@Override
protected void setUp() throws Exception {
super.setUp();
- tmpdir = new File("tmp/" + this.getClass().getSimpleName() + "." + this.getName());
+ tmpdir = JetTestUtils.tmpDirForTest(this);
JetTestUtils.recreateDirectory(tmpdir);
}
diff --git a/compiler/tests/org/jetbrains/jet/JetTestUtils.java b/compiler/tests/org/jetbrains/jet/JetTestUtils.java
index dc777e336faaf..468862c185255 100644
--- a/compiler/tests/org/jetbrains/jet/JetTestUtils.java
+++ b/compiler/tests/org/jetbrains/jet/JetTestUtils.java
@@ -2,6 +2,7 @@
import com.google.common.collect.Lists;
import com.intellij.openapi.Disposable;
+import junit.framework.TestCase;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.jet.lang.cfg.pseudocode.JetControlFlowDataTraceFactory;
import org.jetbrains.jet.lang.diagnostics.Diagnostic;
@@ -166,6 +167,10 @@ public static void rmrf(File file) {
file.delete();
}
}
+
+ public static File tmpDirForTest(TestCase test) {
+ return new File("tmp/" + test.getClass().getSimpleName() + "/" + test.getName());
+ }
public static void recreateDirectory(File file) throws IOException {
rmrf(file);
diff --git a/compiler/tests/org/jetbrains/jet/ReadClassDataTest.java b/compiler/tests/org/jetbrains/jet/ReadClassDataTest.java
index d7e988b905295..abc9bab89f850 100644
--- a/compiler/tests/org/jetbrains/jet/ReadClassDataTest.java
+++ b/compiler/tests/org/jetbrains/jet/ReadClassDataTest.java
@@ -62,7 +62,7 @@ public ReadClassDataTest(@NotNull File testFile) {
@Override
protected void setUp() throws Exception {
super.setUp();
- tmpdir = new File("tmp/" + this.getClass().getSimpleName() + "." + this.getName());
+ tmpdir = JetTestUtils.tmpDirForTest(this);
JetTestUtils.recreateDirectory(tmpdir);
}
|
deb64435cb63102ac487bc00f081e4b3c05bc5b9
|
intellij-community
|
diff: better name for default diff tool--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/diff-impl/src/com/intellij/diff/tools/simple/SimpleDiffTool.java b/platform/diff-impl/src/com/intellij/diff/tools/simple/SimpleDiffTool.java
index bc8d6bfcda7f9..f22d9833ceb85 100644
--- a/platform/diff-impl/src/com/intellij/diff/tools/simple/SimpleDiffTool.java
+++ b/platform/diff-impl/src/com/intellij/diff/tools/simple/SimpleDiffTool.java
@@ -42,6 +42,6 @@ public boolean canShow(@NotNull DiffContext context, @NotNull DiffRequest reques
@NotNull
@Override
public String getName() {
- return "Default viewer";
+ return "Side-by-side viewer";
}
}
|
5924e74d550b3ac5e5d65c2fc80275095de1c0e1
|
hadoop
|
YARN-2768 Improved Yarn Registry service record- structure (stevel)--
|
p
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 8335d2b1c4f12..6689d894772e1 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -710,6 +710,8 @@ Release 2.6.0 - UNRELEASED
YARN-2677 registry punycoding of usernames doesn't fix all usernames to be
DNS-valid (stevel)
+ YARN-2768 Improved Yarn Registry service record structure (stevel)
+
---
YARN-2598 GHS should show N/A instead of null for the inaccessible information
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/cli/RegistryCli.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/cli/RegistryCli.java
index 863039e2e8b8e..bf2b5e5a54d34 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/cli/RegistryCli.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/cli/RegistryCli.java
@@ -24,6 +24,7 @@
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
+import java.util.Map;
import com.google.common.base.Preconditions;
import org.apache.commons.cli.CommandLine;
@@ -174,24 +175,22 @@ public int resolve(String [] args) {
ServiceRecord record = registry.resolve(argsList.get(1));
for (Endpoint endpoint : record.external) {
- if ((endpoint.protocolType.equals(ProtocolTypes.PROTOCOL_WEBUI))
- || (endpoint.protocolType.equals(ProtocolTypes.PROTOCOL_REST))) {
- sysout.print(" Endpoint(ProtocolType="
- + endpoint.protocolType + ", Api="
- + endpoint.api + "); Uris are: ");
- } else {
- sysout.print(" Endpoint(ProtocolType="
+ sysout.println(" Endpoint(ProtocolType="
+ endpoint.protocolType + ", Api="
+ endpoint.api + ");"
+ " Addresses(AddressType="
+ endpoint.addressType + ") are: ");
- }
- for (List<String> a : endpoint.addresses) {
- sysout.print(a + " ");
- }
- sysout.println();
- }
+ for (Map<String, String> address : endpoint.addresses) {
+ sysout.println(" [ ");
+ for (Map.Entry<String, String> entry : address.entrySet()) {
+ sysout.println(" " + entry.getKey()
+ + ": \"" + entry.getValue() + "\"");
+ }
+ sysout.println(" ]");
+ }
+ sysout.println();
+ }
return 0;
} catch (Exception e) {
syserr.println(analyzeException("resolve", e, argsList));
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java
index e086e3694f6e8..af4e4f409c0c0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.registry.client.binding;
import com.google.common.base.Preconditions;
+import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -45,8 +46,6 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
/**
* Support for marshalling objects to and from JSON.
@@ -62,30 +61,30 @@ public class JsonSerDeser<T> {
private static final Logger LOG = LoggerFactory.getLogger(JsonSerDeser.class);
private static final String UTF_8 = "UTF-8";
- public static final String E_NO_SERVICE_RECORD = "No service record at path";
+ public static final String E_NO_DATA = "No data at path";
+ public static final String E_DATA_TOO_SHORT = "Data at path too short";
+ public static final String E_MISSING_MARKER_STRING =
+ "Missing marker string: ";
private final Class<T> classType;
private final ObjectMapper mapper;
- private final byte[] header;
/**
* Create an instance bound to a specific type
* @param classType class to marshall
- * @param header byte array to use as header
*/
- public JsonSerDeser(Class<T> classType, byte[] header) {
+ public JsonSerDeser(Class<T> classType) {
Preconditions.checkArgument(classType != null, "null classType");
- Preconditions.checkArgument(header != null, "null header");
this.classType = classType;
this.mapper = new ObjectMapper();
mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
- // make an immutable copy to keep findbugs happy.
- byte[] h = new byte[header.length];
- System.arraycopy(header, 0, h, 0, header.length);
- this.header = h;
}
+ /**
+ * Get the simple name of the class type to be marshalled
+ * @return the name of the class being marshalled
+ */
public String getName() {
return classType.getSimpleName();
}
@@ -183,7 +182,7 @@ public T load(FileSystem fs, Path path)
if (count != len) {
throw new EOFException(path.toString() + ": read finished prematurely");
}
- return fromBytes(path.toString(), b, 0);
+ return fromBytes(path.toString(), b);
}
/**
@@ -206,8 +205,7 @@ public void save(FileSystem fs, Path path, T instance,
* @throws IOException on any failure
*/
private void writeJsonAsBytes(T instance,
- DataOutputStream dataOutputStream) throws
- IOException {
+ DataOutputStream dataOutputStream) throws IOException {
try {
byte[] b = toBytes(instance);
dataOutputStream.write(b);
@@ -228,36 +226,50 @@ public byte[] toBytes(T instance) throws IOException {
}
/**
- * Convert JSON To bytes, inserting the header
- * @param instance instance to convert
- * @return a byte array
- * @throws IOException
+ * Deserialize from a byte array
+ * @param path path the data came from
+ * @param bytes byte array
+ * @throws IOException all problems
+ * @throws EOFException not enough data
+ * @throws InvalidRecordException if the parsing failed -the record is invalid
*/
- public byte[] toByteswithHeader(T instance) throws IOException {
- byte[] body = toBytes(instance);
-
- ByteBuffer buffer = ByteBuffer.allocate(body.length + header.length);
- buffer.put(header);
- buffer.put(body);
- return buffer.array();
+ public T fromBytes(String path, byte[] bytes) throws IOException,
+ InvalidRecordException {
+ return fromBytes(path, bytes, "");
}
/**
- * Deserialize from a byte array
+ * Deserialize from a byte array, optionally checking for a marker string.
+ * <p>
+ * If the marker parameter is supplied (and not empty), then its presence
+ * will be verified before the JSON parsing takes place; it is a fast-fail
+ * check. If not found, an {@link InvalidRecordException} exception will be
+ * raised
* @param path path the data came from
* @param bytes byte array
- * @return offset in the array to read from
+ * @param marker an optional string which, if set, MUST be present in the
+ * UTF-8 parsed payload.
+ * @return The parsed record
* @throws IOException all problems
* @throws EOFException not enough data
- * @throws InvalidRecordException if the parsing failed -the record is invalid
+ * @throws InvalidRecordException if the JSON parsing failed.
+ * @throws NoRecordException if the data is not considered a record: either
+ * it is too short or it did not contain the marker string.
*/
- public T fromBytes(String path, byte[] bytes, int offset) throws IOException,
- InvalidRecordException {
- int data = bytes.length - offset;
- if (data <= 0) {
- throw new EOFException("No data at " + path);
+ public T fromBytes(String path, byte[] bytes, String marker)
+ throws IOException, NoRecordException, InvalidRecordException {
+ int len = bytes.length;
+ if (len == 0 ) {
+ throw new NoRecordException(path, E_NO_DATA);
+ }
+ if (StringUtils.isNotEmpty(marker) && len < marker.length()) {
+ throw new NoRecordException(path, E_DATA_TOO_SHORT);
+ }
+ String json = new String(bytes, 0, len, UTF_8);
+ if (StringUtils.isNotEmpty(marker)
+ && !json.contains(marker)) {
+ throw new NoRecordException(path, E_MISSING_MARKER_STRING + marker);
}
- String json = new String(bytes, offset, data, UTF_8);
try {
return fromJson(json);
} catch (JsonProcessingException e) {
@@ -266,52 +278,7 @@ public T fromBytes(String path, byte[] bytes, int offset) throws IOException,
}
/**
- * Read from a byte array to a type, checking the header first
- * @param path source of data
- * @param buffer buffer
- * @return the parsed structure
- * Null if the record was too short or the header did not match
- * @throws IOException on a failure
- * @throws NoRecordException if header checks implied there was no record
- * @throws InvalidRecordException if record parsing failed
- */
- @SuppressWarnings("unchecked")
- public T fromBytesWithHeader(String path, byte[] buffer) throws IOException {
- int hlen = header.length;
- int blen = buffer.length;
- if (hlen > 0) {
- if (blen < hlen) {
- throw new NoRecordException(path, E_NO_SERVICE_RECORD);
- }
- byte[] magic = Arrays.copyOfRange(buffer, 0, hlen);
- if (!Arrays.equals(header, magic)) {
- LOG.debug("start of entry does not match service record header at {}",
- path);
- throw new NoRecordException(path, E_NO_SERVICE_RECORD);
- }
- }
- return fromBytes(path, buffer, hlen);
- }
-
- /**
- * Check if a buffer has a header which matches this record type
- * @param buffer buffer
- * @return true if there is a match
- * @throws IOException
- */
- public boolean headerMatches(byte[] buffer) throws IOException {
- int hlen = header.length;
- int blen = buffer.length;
- boolean matches = false;
- if (blen > hlen) {
- byte[] magic = Arrays.copyOfRange(buffer, 0, hlen);
- matches = Arrays.equals(header, magic);
- }
- return matches;
- }
-
- /**
- * Convert an object to a JSON string
+ * Convert an instance to a JSON string
* @param instance instance to convert
* @return a JSON string description
* @throws JsonParseException parse problems
@@ -324,4 +291,19 @@ public synchronized String toJson(T instance) throws IOException,
return mapper.writeValueAsString(instance);
}
+ /**
+ * Convert an instance to a string form for output. This is a robust
+ * operation which will convert any JSON-generating exceptions into
+ * error text.
+ * @param instance non-null instance
+ * @return a JSON string
+ */
+ public String toString(T instance) {
+ Preconditions.checkArgument(instance != null, "Null instance argument");
+ try {
+ return toJson(instance);
+ } catch (IOException e) {
+ return "Failed to convert to a string: " + e;
+ }
+ }
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java
index b4254a3beba9e..ec59d5985a044 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryTypeUtils.java
@@ -22,17 +22,19 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;
-import org.apache.hadoop.registry.client.types.AddressTypes;
+import static org.apache.hadoop.registry.client.types.AddressTypes.*;
import org.apache.hadoop.registry.client.types.Endpoint;
import org.apache.hadoop.registry.client.types.ProtocolTypes;
+import org.apache.hadoop.registry.client.types.ServiceRecord;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
/**
* Static methods to work with registry types —primarily endpoints and the
@@ -94,79 +96,66 @@ public static Endpoint inetAddrEndpoint(String api,
Preconditions.checkArgument(protocolType != null, "null protocolType");
Preconditions.checkArgument(hostname != null, "null hostname");
return new Endpoint(api,
- AddressTypes.ADDRESS_HOSTNAME_AND_PORT,
+ ADDRESS_HOSTNAME_AND_PORT,
protocolType,
- tuplelist(hostname, Integer.toString(port)));
+ hostnamePortPair(hostname, port));
}
/**
* Create an IPC endpoint
* @param api API
- * @param protobuf flag to indicate whether or not the IPC uses protocol
- * buffers
* @param address the address as a tuple of (hostname, port)
* @return the new endpoint
*/
- public static Endpoint ipcEndpoint(String api,
- boolean protobuf, List<String> address) {
- ArrayList<List<String>> addressList = new ArrayList<List<String>>();
- if (address != null) {
- addressList.add(address);
- }
+ public static Endpoint ipcEndpoint(String api, InetSocketAddress address) {
return new Endpoint(api,
- AddressTypes.ADDRESS_HOSTNAME_AND_PORT,
- protobuf ? ProtocolTypes.PROTOCOL_HADOOP_IPC_PROTOBUF
- : ProtocolTypes.PROTOCOL_HADOOP_IPC,
- addressList);
+ ADDRESS_HOSTNAME_AND_PORT,
+ ProtocolTypes.PROTOCOL_HADOOP_IPC,
+ address== null ? null: hostnamePortPair(address));
}
/**
- * Create a single-element list of tuples from the input.
- * that is, an input ("a","b","c") is converted into a list
- * in the form [["a","b","c"]]
- * @param t1 tuple elements
- * @return a list containing a single tuple
+ * Create a single entry map
+ * @param key map entry key
+ * @param val map entry value
+ * @return a 1 entry map.
*/
- public static List<List<String>> tuplelist(String... t1) {
- List<List<String>> outer = new ArrayList<List<String>>();
- outer.add(tuple(t1));
- return outer;
+ public static Map<String, String> map(String key, String val) {
+ Map<String, String> map = new HashMap<String, String>(1);
+ map.put(key, val);
+ return map;
}
/**
- * Create a tuples from the input.
- * that is, an input ("a","b","c") is converted into a list
- * in the form ["a","b","c"]
- * @param t1 tuple elements
- * @return a single tuple as a list
+ * Create a URI
+ * @param uri value
+ * @return a 1 entry map.
*/
- public static List<String> tuple(String... t1) {
- return Arrays.asList(t1);
+ public static Map<String, String> uri(String uri) {
+ return map(ADDRESS_URI, uri);
}
/**
- * Create a tuples from the input, converting all to Strings in the process
- * that is, an input ("a", 7, true) is converted into a list
- * in the form ["a","7,"true"]
- * @param t1 tuple elements
- * @return a single tuple as a list
+ * Create a (hostname, port) address pair
+ * @param hostname hostname
+ * @param port port
+ * @return a 1 entry map.
*/
- public static List<String> tuple(Object... t1) {
- List<String> l = new ArrayList<String>(t1.length);
- for (Object t : t1) {
- l.add(t.toString());
- }
- return l;
+ public static Map<String, String> hostnamePortPair(String hostname, int port) {
+ Map<String, String> map =
+ map(ADDRESS_HOSTNAME_FIELD, hostname);
+ map.put(ADDRESS_PORT_FIELD, Integer.toString(port));
+ return map;
}
/**
- * Convert a socket address pair into a string tuple, (host, port).
- * TODO JDK7: move to InetAddress.getHostString() to avoid DNS lookups.
- * @param address an address
- * @return an element for the address list
+ * Create a (hostname, port) address pair
+ * @param address socket address whose hostname and port are used for the
+ * generated address.
+ * @return a 1 entry map.
*/
- public static List<String> marshall(InetSocketAddress address) {
- return tuple(address.getHostName(), address.getPort());
+ public static Map<String, String> hostnamePortPair(InetSocketAddress address) {
+ return hostnamePortPair(address.getHostName(), address.getPort());
}
/**
@@ -199,24 +188,36 @@ public static List<String> retrieveAddressesUriType(Endpoint epr)
if (epr == null) {
return null;
}
- requireAddressType(AddressTypes.ADDRESS_URI, epr);
- List<List<String>> addresses = epr.addresses;
+ requireAddressType(ADDRESS_URI, epr);
+ List<Map<String, String>> addresses = epr.addresses;
if (addresses.size() < 1) {
throw new InvalidRecordException(epr.toString(),
"No addresses in endpoint");
}
List<String> results = new ArrayList<String>(addresses.size());
- for (List<String> address : addresses) {
- if (address.size() != 1) {
- throw new InvalidRecordException(epr.toString(),
- "Address payload invalid: wrong element count: " +
- address.size());
- }
- results.add(address.get(0));
+ for (Map<String, String> address : addresses) {
+ results.add(getAddressField(address, ADDRESS_URI));
}
return results;
}
+ /**
+ * Get a specific field from an address -raising an exception if
+ * the field is not present
+ * @param address address to query
+ * @param field field to resolve
+ * @return the resolved value. Guaranteed to be non-null.
+ * @throws InvalidRecordException if the field did not resolve
+ */
+ public static String getAddressField(Map<String, String> address,
+ String field) throws InvalidRecordException {
+ String val = address.get(field);
+ if (val == null) {
+ throw new InvalidRecordException("", "Missing address field: " + field);
+ }
+ return val;
+ }
+
/**
* Get the address URLs. Guranteed to return at least one address.
* @param epr endpoint
@@ -237,4 +238,53 @@ public static List<URL> retrieveAddressURLs(Endpoint epr)
}
return results;
}
+
+ /**
+ * Validate the record by checking for null fields and other invalid
+ * conditions
+ * @param path path for exceptions
+ * @param record record to validate. May be null
+ * @throws InvalidRecordException on invalid entries
+ */
+ public static void validateServiceRecord(String path, ServiceRecord record)
+ throws InvalidRecordException {
+ if (record == null) {
+ throw new InvalidRecordException(path, "Null record");
+ }
+ if (!ServiceRecord.RECORD_TYPE.equals(record.type)) {
+ throw new InvalidRecordException(path,
+ "invalid record type field: \"" + record.type + "\"");
+ }
+
+ if (record.external != null) {
+ for (Endpoint endpoint : record.external) {
+ validateEndpoint(path, endpoint);
+ }
+ }
+ if (record.internal != null) {
+ for (Endpoint endpoint : record.internal) {
+ validateEndpoint(path, endpoint);
+ }
+ }
+ }
+
+ /**
+ * Validate the endpoint by checking for null fields and other invalid
+ * conditions
+ * @param path path for exceptions
+ * @param endpoint endpoint to validate. May be null
+ * @throws InvalidRecordException on invalid entries
+ */
+ public static void validateEndpoint(String path, Endpoint endpoint)
+ throws InvalidRecordException {
+ if (endpoint == null) {
+ throw new InvalidRecordException(path, "Null endpoint");
+ }
+ try {
+ endpoint.validate();
+ } catch (RuntimeException e) {
+ throw new InvalidRecordException(path, e.toString());
+ }
+ }
+
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
index 8caf4002feebf..68dc84e7bf749 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
@@ -33,7 +33,6 @@
import org.apache.hadoop.registry.client.impl.zk.RegistryInternalConstants;
import org.apache.hadoop.registry.client.types.RegistryPathStatus;
import org.apache.hadoop.registry.client.types.ServiceRecord;
-import org.apache.hadoop.registry.client.types.ServiceRecordHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -314,7 +313,7 @@ public static Map<String, ServiceRecord> extractServiceRecords(
Collection<RegistryPathStatus> stats) throws IOException {
Map<String, ServiceRecord> results = new HashMap<String, ServiceRecord>(stats.size());
for (RegistryPathStatus stat : stats) {
- if (stat.size > ServiceRecordHeader.getLength()) {
+ if (stat.size > ServiceRecord.RECORD_TYPE.length()) {
// maybe has data
String path = join(parentpath, stat.path);
try {
@@ -344,7 +343,6 @@ public static Map<String, ServiceRecord> extractServiceRecords(
* <p>
* @param operations operation support for fetches
* @param parentpath path of the parent of all the entries
- * @param stats a map of name:value mappings.
* @return a possibly empty map of fullpath:record.
* @throws IOException for any IO Operation that wasn't ignored.
*/
@@ -362,7 +360,6 @@ public static Map<String, ServiceRecord> extractServiceRecords(
* <p>
* @param operations operation support for fetches
* @param parentpath path of the parent of all the entries
- * @param stats a map of name:value mappings.
* @return a possibly empty map of fullpath:record.
* @throws IOException for any IO Operation that wasn't ignored.
*/
@@ -382,7 +379,7 @@ public static Map<String, ServiceRecord> extractServiceRecords(
*/
public static class ServiceRecordMarshal extends JsonSerDeser<ServiceRecord> {
public ServiceRecordMarshal() {
- super(ServiceRecord.class, ServiceRecordHeader.getData());
+ super(ServiceRecord.class);
}
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoRecordException.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoRecordException.java
index 160433f081410..b81b9d4134131 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoRecordException.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/exceptions/NoRecordException.java
@@ -21,17 +21,11 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.registry.client.types.ServiceRecord;
-import org.apache.hadoop.registry.client.types.ServiceRecordHeader;
/**
* Raised if there is no {@link ServiceRecord} resolved at the end
- * of the specified path, for reasons such as:
- * <ul>
- * <li>There wasn't enough data to contain a Service Record.</li>
- * <li>The start of the data did not match the {@link ServiceRecordHeader}
- * header.</li>
- * </ul>
- *
+ * of the specified path.
+ * <p>
* There may be valid data of some form at the end of the path, but it does
* not appear to be a Service Record.
*/
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistryOperationsService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistryOperationsService.java
index 7c01bdf433e0c..271ab25463335 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistryOperationsService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistryOperationsService.java
@@ -24,9 +24,11 @@
import org.apache.hadoop.registry.client.api.BindFlags;
import org.apache.hadoop.registry.client.api.RegistryOperations;
+import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
import org.apache.hadoop.registry.client.binding.RegistryUtils;
import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
import org.apache.hadoop.registry.client.exceptions.InvalidPathnameException;
+import org.apache.hadoop.registry.client.exceptions.NoRecordException;
import org.apache.hadoop.registry.client.types.RegistryPathStatus;
import org.apache.hadoop.registry.client.types.ServiceRecord;
import org.apache.zookeeper.CreateMode;
@@ -103,10 +105,12 @@ public void bind(String path,
int flags) throws IOException {
Preconditions.checkArgument(record != null, "null record");
validatePath(path);
+ // validate the record before putting it
+ RegistryTypeUtils.validateServiceRecord(path, record);
LOG.info("Bound at {} : {}", path, record);
CreateMode mode = CreateMode.PERSISTENT;
- byte[] bytes = serviceRecordMarshal.toByteswithHeader(record);
+ byte[] bytes = serviceRecordMarshal.toBytes(record);
zkSet(path, mode, bytes, getClientAcls(),
((flags & BindFlags.OVERWRITE) != 0));
}
@@ -114,7 +118,11 @@ public void bind(String path,
@Override
public ServiceRecord resolve(String path) throws IOException {
byte[] bytes = zkRead(path);
- return serviceRecordMarshal.fromBytesWithHeader(path, bytes);
+
+ ServiceRecord record = serviceRecordMarshal.fromBytes(path,
+ bytes, ServiceRecord.RECORD_TYPE);
+ RegistryTypeUtils.validateServiceRecord(path, record);
+ return record;
}
@Override
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/AddressTypes.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/AddressTypes.java
index 192819c8d7dc6..36dbf0ce66e1d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/AddressTypes.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/AddressTypes.java
@@ -38,6 +38,8 @@ public interface AddressTypes {
* </pre>
*/
public static final String ADDRESS_HOSTNAME_AND_PORT = "host/port";
+ public static final String ADDRESS_HOSTNAME_FIELD = "host";
+ public static final String ADDRESS_PORT_FIELD = "port";
/**
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/Endpoint.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/Endpoint.java
index 51418d9c9e5f6..e4effb42c8664 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/Endpoint.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/Endpoint.java
@@ -21,14 +21,16 @@
import com.google.common.base.Preconditions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.registry.client.binding.JsonSerDeser;
import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import java.net.URI;
import java.util.ArrayList;
-import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
/**
* Description of a single service/component endpoint.
@@ -67,7 +69,7 @@ public final class Endpoint implements Cloneable {
/**
* a list of address tuples —tuples whose format depends on the address type
*/
- public List<List<String>> addresses;
+ public List<Map<String, String>> addresses;
/**
* Create an empty instance.
@@ -84,10 +86,11 @@ public Endpoint(Endpoint that) {
this.api = that.api;
this.addressType = that.addressType;
this.protocolType = that.protocolType;
- this.addresses = new ArrayList<List<String>>(that.addresses.size());
- for (List<String> address : addresses) {
- List<String> addr2 = new ArrayList<String>(address.size());
- Collections.copy(address, addr2);
+ this.addresses = newAddresses(that.addresses.size());
+ for (Map<String, String> address : that.addresses) {
+ Map<String, String> addr2 = new HashMap<String, String>(address.size());
+ addr2.putAll(address);
+ addresses.add(addr2);
}
}
@@ -101,16 +104,82 @@ public Endpoint(Endpoint that) {
public Endpoint(String api,
String addressType,
String protocolType,
- List<List<String>> addrs) {
+ List<Map<String, String>> addrs) {
this.api = api;
this.addressType = addressType;
this.protocolType = protocolType;
- this.addresses = new ArrayList<List<String>>();
+ this.addresses = newAddresses(0);
if (addrs != null) {
addresses.addAll(addrs);
}
}
+ /**
+ * Build an endpoint with an empty address list
+ * @param api API name
+ * @param addressType address type
+ * @param protocolType protocol type
+ */
+ public Endpoint(String api,
+ String addressType,
+ String protocolType) {
+ this.api = api;
+ this.addressType = addressType;
+ this.protocolType = protocolType;
+ this.addresses = newAddresses(0);
+ }
+
+ /**
+ * Build an endpoint with a single address entry.
+ * <p>
+ * This constructor is superfluous given the varags constructor is equivalent
+ * for a single element argument. However, type-erasure in java generics
+ * causes javac to warn about unchecked generic array creation. This
+ * constructor, which represents the common "one address" case, does
+ * not generate compile-time warnings.
+ * @param api API name
+ * @param addressType address type
+ * @param protocolType protocol type
+ * @param addr address. May be null —in which case it is not added
+ */
+ public Endpoint(String api,
+ String addressType,
+ String protocolType,
+ Map<String, String> addr) {
+ this(api, addressType, protocolType);
+ if (addr != null) {
+ addresses.add(addr);
+ }
+ }
+
+ /**
+ * Build an endpoint with a list of addresses
+ * @param api API name
+ * @param addressType address type
+ * @param protocolType protocol type
+ * @param addrs addresses. Null elements will be skipped
+ */
+ public Endpoint(String api,
+ String addressType,
+ String protocolType,
+ Map<String, String>...addrs) {
+ this(api, addressType, protocolType);
+ for (Map<String, String> addr : addrs) {
+ if (addr!=null) {
+ addresses.add(addr);
+ }
+ }
+ }
+
+ /**
+ * Create a new address structure of the requested size
+ * @param size size to create
+ * @return the new list
+ */
+ private List<Map<String, String>> newAddresses(int size) {
+ return new ArrayList<Map<String, String>>(size);
+ }
+
/**
* Build an endpoint from a list of URIs; each URI
* is ASCII-encoded and added to the list of addresses.
@@ -125,40 +194,16 @@ public Endpoint(String api,
this.addressType = AddressTypes.ADDRESS_URI;
this.protocolType = protocolType;
- List<List<String>> addrs = new ArrayList<List<String>>(uris.length);
+ List<Map<String, String>> addrs = newAddresses(uris.length);
for (URI uri : uris) {
- addrs.add(RegistryTypeUtils.tuple(uri.toString()));
+ addrs.add(RegistryTypeUtils.uri(uri.toString()));
}
this.addresses = addrs;
}
@Override
public String toString() {
- final StringBuilder sb = new StringBuilder("Endpoint{");
- sb.append("api='").append(api).append('\'');
- sb.append(", addressType='").append(addressType).append('\'');
- sb.append(", protocolType='").append(protocolType).append('\'');
-
- sb.append(", addresses=");
- if (addresses != null) {
- sb.append("[ ");
- for (List<String> address : addresses) {
- sb.append("[ ");
- if (address == null) {
- sb.append("NULL entry in address list");
- } else {
- for (String elt : address) {
- sb.append('"').append(elt).append("\" ");
- }
- }
- sb.append("] ");
- };
- sb.append("] ");
- } else {
- sb.append("(null) ");
- }
- sb.append('}');
- return sb.toString();
+ return marshalToString.toString(this);
}
/**
@@ -173,7 +218,7 @@ public void validate() {
Preconditions.checkNotNull(addressType, "null addressType field");
Preconditions.checkNotNull(protocolType, "null protocolType field");
Preconditions.checkNotNull(addresses, "null addresses field");
- for (List<String> address : addresses) {
+ for (Map<String, String> address : addresses) {
Preconditions.checkNotNull(address, "null element in address");
}
}
@@ -184,7 +229,19 @@ public void validate() {
* @throws CloneNotSupportedException
*/
@Override
- protected Object clone() throws CloneNotSupportedException {
+ public Object clone() throws CloneNotSupportedException {
return super.clone();
}
+
+
+ /**
+ * Static instance of service record marshalling
+ */
+ private static class Marshal extends JsonSerDeser<Endpoint> {
+ private Marshal() {
+ super(Endpoint.class);
+ }
+ }
+
+ private static final Marshal marshalToString = new Marshal();
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/ProtocolTypes.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/ProtocolTypes.java
index f225cf087753b..b836b0003c7dc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/ProtocolTypes.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/ProtocolTypes.java
@@ -34,15 +34,10 @@ public interface ProtocolTypes {
String PROTOCOL_FILESYSTEM = "hadoop/filesystem";
/**
- * Classic Hadoop IPC : {@value}.
+ * Hadoop IPC, "classic" or protobuf : {@value}.
*/
String PROTOCOL_HADOOP_IPC = "hadoop/IPC";
- /**
- * Hadoop protocol buffers IPC: {@value}.
- */
- String PROTOCOL_HADOOP_IPC_PROTOBUF = "hadoop/protobuf";
-
/**
* Corba IIOP: {@value}.
*/
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/ServiceRecord.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/ServiceRecord.java
index 378127fc026c2..9403d3168e2d5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/ServiceRecord.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/ServiceRecord.java
@@ -21,6 +21,7 @@
import com.google.common.base.Preconditions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;
import org.codehaus.jackson.annotate.JsonAnyGetter;
import org.codehaus.jackson.annotate.JsonAnySetter;
import org.codehaus.jackson.map.annotate.JsonSerialize;
@@ -40,6 +41,17 @@
@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
public class ServiceRecord implements Cloneable {
+ /**
+ * A type string which MUST be in the serialized json. This permits
+ * fast discarding of invalid entries
+ */
+ public static final String RECORD_TYPE = "JSONServiceRecord";
+
+ /**
+ * The type field. This must be the string {@link #RECORD_TYPE}
+ */
+ public String type = RECORD_TYPE;
+
/**
* Description string
*/
@@ -233,17 +245,5 @@ protected Object clone() throws CloneNotSupportedException {
return super.clone();
}
- /**
- * Validate the record by checking for null fields and other invalid
- * conditions
- * @throws NullPointerException if a field is null when it
- * MUST be set.
- * @throws RuntimeException on invalid entries
- */
- public void validate() {
- for (Endpoint endpoint : external) {
- Preconditions.checkNotNull("null endpoint", endpoint);
- endpoint.validate();
- }
- }
+
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/ServiceRecordHeader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/ServiceRecordHeader.java
deleted file mode 100644
index 2f75dba5a3357..0000000000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/types/ServiceRecordHeader.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.registry.client.types;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * Service record header; access to the byte array kept private
- * to avoid findbugs warnings of mutability
- */
[email protected]
[email protected]
-public class ServiceRecordHeader {
- /**
- * Header of a service record: "jsonservicerec"
- * By making this over 12 bytes long, we can auto-determine which entries
- * in a listing are too short to contain a record without getting their data
- */
- private static final byte[] RECORD_HEADER = {
- 'j', 's', 'o', 'n',
- 's', 'e', 'r', 'v', 'i', 'c', 'e',
- 'r', 'e', 'c'
- };
-
- /**
- * Get the length of the record header
- * @return the header length
- */
- public static int getLength() {
- return RECORD_HEADER.length;
- }
-
- /**
- * Get a clone of the record header
- * @return the new record header.
- */
- public static byte[] getData() {
- byte[] h = new byte[RECORD_HEADER.length];
- System.arraycopy(RECORD_HEADER, 0, h, 0, RECORD_HEADER.length);
- return h;
- }
-}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/tla/yarnregistry.tla b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/tla/yarnregistry.tla
index 1c19adead446c..a950475f402e5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/tla/yarnregistry.tla
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/tla/yarnregistry.tla
@@ -4,6 +4,7 @@ EXTENDS FiniteSets, Sequences, Naturals, TLC
(*
+============================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -19,6 +20,7 @@ EXTENDS FiniteSets, Sequences, Naturals, TLC
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+============================================================================
*)
(*
@@ -71,13 +73,22 @@ CONSTANTS
MknodeActions \* all possible mkdir actions
+ASSUME PathChars \in STRING
+ASSUME Paths \in STRING
+
+(* Data in records is JSON, hence a string *)
+ASSUME Data \in STRING
+
+----------------------------------------------------------------------------------------
(* the registry*)
VARIABLE registry
+
(* Sequence of actions to apply to the registry *)
VARIABLE actions
+
----------------------------------------------------------------------------------------
(* Tuple of all variables. *)
@@ -92,7 +103,6 @@ vars == << registry, actions >>
(* Persistence policy *)
PersistPolicySet == {
- "", \* Undefined; field not present. PERMANENT is implied.
"permanent", \* persists until explicitly removed
"application", \* persists until the application finishes
"application-attempt", \* persists until the application attempt finishes
@@ -104,7 +114,6 @@ TypeInvariant ==
/\ \A p \in PersistPolicies: p \in PersistPolicySet
-
----------------------------------------------------------------------------------------
@@ -129,6 +138,14 @@ RegistryEntry == [
]
+(* Define the set of all string to string mappings *)
+
+StringMap == [
+ STRING |-> STRING
+]
+
+
+
(*
An endpoint in a service record
*)
@@ -140,21 +157,14 @@ Endpoint == [
addresses: Addresses
]
-(* Attributes are the set of all string to string mappings *)
-
-Attributes == [
-STRING |-> STRING
-]
-
(*
A service record
*)
ServiceRecord == [
- \* ID -used when applying the persistence policy
- yarn_id: STRING,
- \* the persistence policy
- yarn_persistence: PersistPolicySet,
+ \* This MUST be present: if it is not then the data is not a service record
+ \* This permits shortcut scan & reject of byte arrays without parsing
+ type: "JSONServiceRecord",
\*A description
description: STRING,
@@ -166,9 +176,34 @@ ServiceRecord == [
internal: Endpoints,
\* Attributes are a function
- attributes: Attributes
+ attributes: StringMap
]
+----------------------------------------------------------------------------------------
+
+(*
+ There is an operation serialize whose internals are not defined,
+ Which converts the service records to JSON
+ *)
+
+CONSTANT serialize(_)
+
+(* A function which returns true iff the byte stream is considered a valid service record. *)
+CONSTANT containsServiceRecord(_)
+
+(* A function to deserialize a string to JSON *)
+CONSTANT deserialize(_)
+
+ASSUME \A json \in STRING: containsServiceRecord(json) \in BOOLEAN
+
+(* Records can be serialized *)
+ASSUME \A r \in ServiceRecord : serialize(r) \in STRING /\ containsServiceRecord(serialize(r))
+
+(* All strings for which containsServiceRecord() holds can be deserialized *)
+ASSUME \A json \in STRING: containsServiceRecord(json) => deserialize(json) \in ServiceRecord
+
+
+
----------------------------------------------------------------------------------------
@@ -304,8 +339,8 @@ validRegistry(R) ==
\* an entry must be the root entry or have a parent entry
/\ \A e \in R: isRootEntry(e) \/ exists(R, parent(e.path))
- \* If the entry has data, it must be a service record
- /\ \A e \in R: (e.data = << >> \/ e.data \in ServiceRecords)
+ \* If the entry has data, it must contain a service record
+ /\ \A e \in R: (e.data = << >> \/ containsServiceRecord(e.data))
----------------------------------------------------------------------------------------
@@ -336,13 +371,13 @@ mknode() adds a new empty entry where there was none before, iff
*)
mknodeSimple(R, path) ==
- LET record == [ path |-> path, data |-> <<>> ]
+ LET entry == [ path |-> path, data |-> <<>> ]
IN \/ exists(R, path)
- \/ (exists(R, parent(path)) /\ canBind(R, record) /\ (R' = R \union {record} ))
+ \/ (exists(R, parent(path)) /\ canBind(R, entry) /\ (R' = R \union {entry} ))
(*
-For all parents, the mknodeSimpl() criteria must apply.
+For all parents, the mknodeSimple() criteria must apply.
This could be defined recursively, though as TLA+ does not support recursion,
an alternative is required
@@ -350,7 +385,8 @@ an alternative is required
Because this specification is declaring the final state of a operation, not
the implemental, all that is needed is to describe those parents.
-It declares that the mkdirSimple state applies to the path and all its parents in the set R'
+It declares that the mknodeSimple() state applies to the path and all
+its parents in the set R'
*)
mknodeWithParents(R, path) ==
@@ -402,7 +438,7 @@ purge(R, path, id, persistence) ==
=> recursiveDelete(R, p2.path)
(*
-resolveRecord() resolves the record at a path or fails.
+resolveEntry() resolves the record entry at a path or fails.
It relies on the fact that if the cardinality of a set is 1, then the CHOOSE operator
is guaranteed to return the single entry of that set, iff the choice predicate holds.
@@ -411,19 +447,28 @@ Using a predicate of TRUE, it always succeeds, so this function selects
the sole entry of the resolve operation.
*)
-resolveRecord(R, path) ==
+resolveEntry(R, path) ==
LET l == resolve(R, path) IN
/\ Cardinality(l) = 1
/\ CHOOSE e \in l : TRUE
+(*
+ Resolve a record by resolving the entry and deserializing the result
+ *)
+resolveRecord(R, path) ==
+ deserialize(resolveEntry(R, path))
+
+
(*
The specific action of putting an entry into a record includes validating the record
*)
validRecordToBind(path, record) ==
\* The root entry must have permanent persistence
- isRootPath(path) => (record.attributes["yarn:persistence"] = "permanent"
- \/ record.attributes["yarn:persistence"] = "")
+ isRootPath(path) => (
+ record.attributes["yarn:persistence"] = "permanent"
+ \/ record.attributes["yarn:persistence"]
+ \/ record.attributes["yarn:persistence"] = {})
(*
@@ -432,13 +477,12 @@ marshalled as the data in the entry
*)
bindRecord(R, path, record) ==
/\ validRecordToBind(path, record)
- /\ bind(R, [path |-> path, data |-> record])
+ /\ bind(R, [path |-> path, data |-> serialize(record)])
----------------------------------------------------------------------------------------
-
(*
The action queue can only contain one of the sets of action types, and
by giving each a unique name, those sets are guaranteed to be disjoint
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java
index 460ecad876a1f..91602e1d3b3e2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/RegistryTestHelper.java
@@ -20,7 +20,6 @@
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.Shell;
import org.apache.hadoop.registry.client.api.RegistryConstants;
import org.apache.hadoop.registry.client.binding.RegistryUtils;
import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
@@ -46,11 +45,7 @@
import java.util.List;
import java.util.Map;
-import static org.apache.hadoop.registry.client.binding.RegistryTypeUtils.inetAddrEndpoint;
-import static org.apache.hadoop.registry.client.binding.RegistryTypeUtils.ipcEndpoint;
-import static org.apache.hadoop.registry.client.binding.RegistryTypeUtils.restEndpoint;
-import static org.apache.hadoop.registry.client.binding.RegistryTypeUtils.tuple;
-import static org.apache.hadoop.registry.client.binding.RegistryTypeUtils.webEndpoint;
+import static org.apache.hadoop.registry.client.binding.RegistryTypeUtils.*;
/**
* This is a set of static methods to aid testing the registry operations.
@@ -61,18 +56,18 @@ public class RegistryTestHelper extends Assert {
public static final String SC_HADOOP = "org-apache-hadoop";
public static final String USER = "devteam/";
public static final String NAME = "hdfs";
- public static final String API_WEBHDFS = "org_apache_hadoop_namenode_webhdfs";
- public static final String API_HDFS = "org_apache_hadoop_namenode_dfs";
+ public static final String API_WEBHDFS = "classpath:org.apache.hadoop.namenode.webhdfs";
+ public static final String API_HDFS = "classpath:org.apache.hadoop.namenode.dfs";
public static final String USERPATH = RegistryConstants.PATH_USERS + USER;
public static final String PARENT_PATH = USERPATH + SC_HADOOP + "/";
public static final String ENTRY_PATH = PARENT_PATH + NAME;
- public static final String NNIPC = "nnipc";
- public static final String IPC2 = "IPC2";
+ public static final String NNIPC = "uuid:423C2B93-C927-4050-AEC6-6540E6646437";
+ public static final String IPC2 = "uuid:0663501D-5AD3-4F7E-9419-52F5D6636FCF";
private static final Logger LOG =
LoggerFactory.getLogger(RegistryTestHelper.class);
- public static final String KTUTIL = "ktutil";
private static final RegistryUtils.ServiceRecordMarshal recordMarshal =
new RegistryUtils.ServiceRecordMarshal();
+ public static final String HTTP_API = "http://";
/**
* Assert the path is valid by ZK rules
@@ -148,9 +143,9 @@ public static void validateEntry(ServiceRecord record) {
assertEquals(API_WEBHDFS, webhdfs.api);
assertEquals(AddressTypes.ADDRESS_URI, webhdfs.addressType);
assertEquals(ProtocolTypes.PROTOCOL_REST, webhdfs.protocolType);
- List<List<String>> addressList = webhdfs.addresses;
- List<String> url = addressList.get(0);
- String addr = url.get(0);
+ List<Map<String, String>> addressList = webhdfs.addresses;
+ Map<String, String> url = addressList.get(0);
+ String addr = url.get("uri");
assertTrue(addr.contains("http"));
assertTrue(addr.contains(":8020"));
@@ -159,8 +154,9 @@ public static void validateEntry(ServiceRecord record) {
nnipc.protocolType);
Endpoint ipc2 = findEndpoint(record, IPC2, false, 1,2);
+ assertNotNull(ipc2);
- Endpoint web = findEndpoint(record, "web", true, 1, 1);
+ Endpoint web = findEndpoint(record, HTTP_API, true, 1, 1);
assertEquals(1, web.addresses.size());
assertEquals(1, web.addresses.get(0).size());
}
@@ -275,14 +271,14 @@ public static ServiceRecord buildExampleServiceEntry(String persistence) throws
public static void addSampleEndpoints(ServiceRecord entry, String hostname)
throws URISyntaxException {
assertNotNull(hostname);
- entry.addExternalEndpoint(webEndpoint("web",
+ entry.addExternalEndpoint(webEndpoint(HTTP_API,
new URI("http", hostname + ":80", "/")));
entry.addExternalEndpoint(
restEndpoint(API_WEBHDFS,
new URI("http", hostname + ":8020", "/")));
- Endpoint endpoint = ipcEndpoint(API_HDFS, true, null);
- endpoint.addresses.add(tuple(hostname, "8030"));
+ Endpoint endpoint = ipcEndpoint(API_HDFS, null);
+ endpoint.addresses.add(RegistryTypeUtils.hostnamePortPair(hostname, 8030));
entry.addInternalEndpoint(endpoint);
InetSocketAddress localhost = new InetSocketAddress("localhost", 8050);
entry.addInternalEndpoint(
@@ -290,9 +286,7 @@ public static void addSampleEndpoints(ServiceRecord entry, String hostname)
8050));
entry.addInternalEndpoint(
RegistryTypeUtils.ipcEndpoint(
- IPC2,
- true,
- RegistryTypeUtils.marshall(localhost)));
+ IPC2, localhost));
}
/**
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestMarshalling.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestMarshalling.java
index 14e3b1fa631ea..f1814d30707c0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestMarshalling.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/client/binding/TestMarshalling.java
@@ -19,9 +19,9 @@
package org.apache.hadoop.registry.client.binding;
import org.apache.hadoop.registry.RegistryTestHelper;
+import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;
import org.apache.hadoop.registry.client.exceptions.NoRecordException;
import org.apache.hadoop.registry.client.types.ServiceRecord;
-import org.apache.hadoop.registry.client.types.ServiceRecordHeader;
import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies;
import org.junit.BeforeClass;
import org.junit.Rule;
@@ -31,8 +31,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.EOFException;
-
/**
* Test record marshalling
*/
@@ -44,6 +42,7 @@ public class TestMarshalling extends RegistryTestHelper {
public final Timeout testTimeout = new Timeout(10000);
@Rule
public TestName methodName = new TestName();
+
private static RegistryUtils.ServiceRecordMarshal marshal;
@BeforeClass
@@ -55,42 +54,65 @@ public static void setupClass() {
public void testRoundTrip() throws Throwable {
String persistence = PersistencePolicies.PERMANENT;
ServiceRecord record = createRecord(persistence);
- record.set("customkey","customvalue");
- record.set("customkey2","customvalue2");
+ record.set("customkey", "customvalue");
+ record.set("customkey2", "customvalue2");
+ RegistryTypeUtils.validateServiceRecord("", record);
LOG.info(marshal.toJson(record));
byte[] bytes = marshal.toBytes(record);
- ServiceRecord r2 = marshal.fromBytes("", bytes, 0);
+ ServiceRecord r2 = marshal.fromBytes("", bytes);
assertMatches(record, r2);
+ RegistryTypeUtils.validateServiceRecord("", r2);
}
- @Test
- public void testRoundTripHeaders() throws Throwable {
- ServiceRecord record = createRecord(PersistencePolicies.CONTAINER);
- byte[] bytes = marshal.toByteswithHeader(record);
- ServiceRecord r2 = marshal.fromBytesWithHeader("", bytes);
- assertMatches(record, r2);
+ @Test(expected = NoRecordException.class)
+ public void testUnmarshallNoData() throws Throwable {
+ marshal.fromBytes("src", new byte[]{});
}
@Test(expected = NoRecordException.class)
- public void testRoundTripBadHeaders() throws Throwable {
- ServiceRecord record = createRecord(PersistencePolicies.APPLICATION);
- byte[] bytes = marshal.toByteswithHeader(record);
- bytes[1] = 0x01;
- marshal.fromBytesWithHeader("src", bytes);
+ public void testUnmarshallNotEnoughData() throws Throwable {
+ // this is nominally JSON -but without the service record header
+ marshal.fromBytes("src", new byte[]{'{','}'}, ServiceRecord.RECORD_TYPE);
+ }
+
+ @Test(expected = InvalidRecordException.class)
+ public void testUnmarshallNoBody() throws Throwable {
+ byte[] bytes = "this is not valid JSON at all and should fail".getBytes();
+ marshal.fromBytes("src", bytes);
+ }
+
+ @Test(expected = InvalidRecordException.class)
+ public void testUnmarshallWrongType() throws Throwable {
+ byte[] bytes = "{'type':''}".getBytes();
+ ServiceRecord serviceRecord = marshal.fromBytes("marshalling", bytes);
+ RegistryTypeUtils.validateServiceRecord("validating", serviceRecord);
}
@Test(expected = NoRecordException.class)
- public void testUnmarshallHeaderTooShort() throws Throwable {
- marshal.fromBytesWithHeader("src", new byte[]{'a'});
+ public void testUnmarshallWrongLongType() throws Throwable {
+ ServiceRecord record = new ServiceRecord();
+ record.type = "ThisRecordHasALongButNonMatchingType";
+ byte[] bytes = marshal.toBytes(record);
+ ServiceRecord serviceRecord = marshal.fromBytes("marshalling",
+ bytes, ServiceRecord.RECORD_TYPE);
}
- @Test(expected = EOFException.class)
- public void testUnmarshallNoBody() throws Throwable {
- byte[] bytes = ServiceRecordHeader.getData();
- marshal.fromBytesWithHeader("src", bytes);
+ @Test(expected = NoRecordException.class)
+ public void testUnmarshallNoType() throws Throwable {
+ ServiceRecord record = new ServiceRecord();
+ record.type = "NoRecord";
+ byte[] bytes = marshal.toBytes(record);
+ ServiceRecord serviceRecord = marshal.fromBytes("marshalling",
+ bytes, ServiceRecord.RECORD_TYPE);
}
+ @Test(expected = InvalidRecordException.class)
+ public void testRecordValidationWrongType() throws Throwable {
+ ServiceRecord record = new ServiceRecord();
+ record.type = "NotAServiceRecordType";
+ RegistryTypeUtils.validateServiceRecord("validating", record);
+ }
@Test
public void testUnknownFieldsRoundTrip() throws Throwable {
@@ -102,8 +124,8 @@ public void testUnknownFieldsRoundTrip() throws Throwable {
assertEquals("2", record.get("intval"));
assertNull(record.get("null"));
assertEquals("defval", record.get("null", "defval"));
- byte[] bytes = marshal.toByteswithHeader(record);
- ServiceRecord r2 = marshal.fromBytesWithHeader("", bytes);
+ byte[] bytes = marshal.toBytes(record);
+ ServiceRecord r2 = marshal.fromBytes("", bytes);
assertEquals("value", r2.get("key"));
assertEquals("2", r2.get("intval"));
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/operations/TestRegistryOperations.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/operations/TestRegistryOperations.java
index 7a7f88cd51cb3..853d7f179095f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/operations/TestRegistryOperations.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/operations/TestRegistryOperations.java
@@ -23,6 +23,7 @@
import org.apache.hadoop.fs.PathNotFoundException;
import org.apache.hadoop.registry.AbstractRegistryTest;
import org.apache.hadoop.registry.client.api.BindFlags;
+import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
import org.apache.hadoop.registry.client.binding.RegistryUtils;
import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
import org.apache.hadoop.registry.client.exceptions.NoRecordException;
@@ -91,10 +92,8 @@ public void testLsParent() throws Throwable {
childStats.values());
assertEquals(1, records.size());
ServiceRecord record = records.get(ENTRY_PATH);
- assertNotNull(record);
- record.validate();
+ RegistryTypeUtils.validateServiceRecord(ENTRY_PATH, record);
assertMatches(written, record);
-
}
@Test
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/registry/yarn-registry.md b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/registry/yarn-registry.md
index a2a5009660fe6..b38d9fba5ba25 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/registry/yarn-registry.md
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/registry/yarn-registry.md
@@ -352,6 +352,10 @@ application.
<td>Name</td>
<td>Description</td>
</tr>
+ <tr>
+ <td>type: String</td>
+ <td>Always: "JSONServiceRecord"</td>
+ </tr>
<tr>
<td>description: String</td>
<td>Human-readable description.</td>
@@ -366,6 +370,8 @@ application.
</tr>
</table>
+The type field MUST be `"JSONServiceRecord"`. Mandating this string allows future record types *and* permits rapid rejection of byte arrays that lack this string before attempting JSON parsing.
+
### YARN Persistence policies
The YARN Resource Manager integration integrates cleanup of service records
@@ -379,7 +385,6 @@ any use of the registry without the RM's participation.
The attributes, `yarn:id` and `yarn:persistence` specify which records
*and any child entries* may be deleted as the associated YARN components complete.
-
The `yarn:id` field defines the application, attempt or container ID to match;
the `yarn:persistence` attribute defines the trigger for record cleanup, and
implicitly the type of the contents of the `yarn:id` field.
@@ -432,31 +437,32 @@ up according the lifecycle of that application.
<td>Description</td>
</tr>
<tr>
- <td>addresses: List[List[String]]</td>
- <td>a list of address tuples whose format depends on the address type</td>
- </tr>
- <tr>
- <td>addressType: String</td>
- <td>format of the binding</td>
- </tr>
+ <td>api: URI as String</td>
+ <td>API implemented at the end of the binding</td>
<tr>
<td>protocol: String</td>
<td>Protocol. Examples:
`http`, `https`, `hadoop-rpc`, `zookeeper`, `web`, `REST`, `SOAP`, ...</td>
</tr>
<tr>
- <td>api: String</td>
- <td>API implemented at the end of the binding</td>
+ <td>addressType: String</td>
+ <td>format of the binding</td>
</tr>
+ </tr>
+ <tr>
+ <td>addresses: List[Map[String, String]]</td>
+ <td>a list of address maps</td>
+ </tr>
+
</table>
All string fields have a limit on size, to dissuade services from hiding
complex JSON structures in the text description.
-### Field: Address Type
+#### Field `addressType`: Address Type
-The addressType field defines the string format of entries.
+The `addressType` field defines the string format of entries.
Having separate types is that tools (such as a web viewer) can process binding
strings without having to recognize the protocol.
@@ -467,43 +473,58 @@ strings without having to recognize the protocol.
<td>binding format</td>
</tr>
<tr>
- <td>`url`</td>
- <td>`[URL]`</td>
+ <td>uri</td>
+ <td>uri:URI of endpoint</td>
</tr>
<tr>
- <td>`hostname`</td>
- <td>`[hostname]`</td>
+ <td>hostname</td>
+ <td>hostname: service host</td>
</tr>
<tr>
- <td>`inetaddress`</td>
- <td>`[hostname, port]`</td>
+ <td>inetaddress</td>
+ <td>hostname: service host, port: service port</td>
</tr>
<tr>
- <td>`path`</td>
- <td>`[/path/to/something]`</td>
+ <td>path</td>
+ <td>path: generic unix filesystem path</td>
</tr>
<tr>
- <td>`zookeeper`</td>
- <td>`[quorum-entry, path]`</td>
+ <td>zookeeper</td>
+ <td>hostname: service host, port: service port, path: ZK path</td>
</tr>
</table>
-An actual zookeeper binding consists of a list of `hostname:port` bindings –the
-quorum— and the path within. In the proposed schema, every quorum entry will be
-listed as a triple of `[hostname, port, path]`. Client applications do not
-expect the path to de be different across the quorum. The first entry in the
-list of quorum hosts MUST define the path to be used by all clients. Later
-entries SHOULD list the same path, though clients MUST ignore these.
+In the zookeeper binding, every entry represents a single node in quorum,
+the `hostname` and `port` fields defining the hostname of the ZK instance
+and the port on which it is listening. The `path` field lists zookeeper path
+for applications to use. For example, for HBase this would refer to the znode
+containing information about the HBase cluster.
+
+The path MUST be identical across all address elements in the `addresses` list.
+This ensures that any single address contains enough information to connect
+to the quorum and connect to the relevant znode.
New Address types may be defined; if not standard please prefix with the
character sequence `"x-"`.
-#### **Field: API**
+### Field `api`: API identifier
+
+The API field MUST contain a URI that identifies the specific API of an endpoint.
+These MUST be unique to an API to avoid confusion.
+
+The following strategies are suggested to provide unique URIs for an API
+
+1. The SOAP/WS-* convention of using the URL to where the WSDL defining the service
+2. A URL to the svn/git hosted document defining a REST API
+3. the `classpath` schema followed by a path to a class or package in an application.
+4. The `uuid` schema with a generated UUID.
+
+It is hoped that standard API URIs will be defined for common APIs. Two such non-normative APIs are used in this document
+
+* `http://` : A web site for humans
+* `classpath:javax.management.jmx`: and endpoint supporting the JMX management protocol (RMI-based)
-APIs may be unique to a service class, or may be common across by service
-classes. They MUST be given unique names. These MAY be based on service
-packages but MAY be derived from other naming schemes:
### Examples of Service Entries
@@ -524,12 +545,14 @@ overall application. It exports the URL to a load balancer.
{
"description" : "tomcat-based web application",
- "registrationTime" : 1408638082444,
"external" : [ {
- "api" : "www",
+ "api" : "http://internal.example.org/restapis/scheduler/20141026v1",
"addressType" : "uri",
- "protocolType" : "REST",
- "addresses" : [ [ "http://loadbalancer/" ] [ "http://loadbalancer2/" ] ]
+ "protocol" : "REST",
+ "addresses" : [
+ { "uri" : "http://loadbalancer/" },
+ { "uri" : "http://loadbalancer2/" }
+ ]
} ],
"internal" : [ ]
}
@@ -545,21 +568,23 @@ will trigger the deletion of this entry
/users/devteam/org-apache-tomcat/test1/components/container-1408631738011-0001-01-000001
{
- "registrationTime" : 1408638082445,
"yarn:id" : "container_1408631738011_0001_01_000001",
- "yarn:persistence" : "3",
- "description" : null,
+ "yarn:persistence" : "container",
+ "description" : "",
"external" : [ {
- "api" : "www",
+ "api" : "http://internal.example.org/restapis/scheduler/20141026v1",
"addressType" : "uri",
- "protocolType" : "REST",
- "addresses" : [ [ "http://rack4server3:43572" ] ]
+ "protocol" : "REST",
+ "addresses" : [{ "uri" : "rack4server3:43572" } ]
} ],
"internal" : [ {
- "api" : "jmx",
+ "api" : "classpath:javax.management.jmx",
"addressType" : "host/port",
- "protocolType" : "JMX",
- "addresses" : [ [ "rack4server3", "43573" ] ]
+ "protocol" : "rmi",
+ "addresses" : [ {
+ "host" : "rack4server3",
+ "port" : "48551"
+ } ]
} ]
}
@@ -571,19 +596,22 @@ external endpoint, the JMX addresses as internal.
{
"registrationTime" : 1408638082445,
"yarn:id" : "container_1408631738011_0001_01_000002",
- "yarn:persistence" : "3",
+ "yarn:persistence" : "container",
"description" : null,
"external" : [ {
- "api" : "www",
+ "api" : "http://internal.example.org/restapis/scheduler/20141026v1",
"addressType" : "uri",
- "protocolType" : "REST",
+ "protocol" : "REST",
"addresses" : [ [ "http://rack1server28:35881" ] ]
} ],
"internal" : [ {
- "api" : "jmx",
+ "api" : "classpath:javax.management.jmx",
"addressType" : "host/port",
- "protocolType" : "JMX",
- "addresses" : [ [ "rack1server28", "35882" ] ]
+ "protocol" : "rmi",
+ "addresses" : [ {
+ "host" : "rack1server28",
+ "port" : "48551"
+ } ]
} ]
}
@@ -887,3 +915,106 @@ Implementations may throttle update operations.
**Rate of Polling**
Clients which poll the registry may be throttled.
+
+# Complete service record example
+
+Below is a (non-normative) example of a service record retrieved
+from a YARN application.
+
+
+ {
+ "type" : "JSONServiceRecord",
+ "description" : "Slider Application Master",
+ "yarn:persistence" : "application",
+ "yarn:id" : "application_1414052463672_0028",
+ "external" : [ {
+ "api" : "classpath:org.apache.slider.appmaster",
+ "addressType" : "host/port",
+ "protocol" : "hadoop/IPC",
+ "addresses" : [ {
+ "port" : "48551",
+ "host" : "nn.example.com"
+ } ]
+ }, {
+ "api" : "http://",
+ "addressType" : "uri",
+ "protocol" : "web",
+ "addresses" : [ {
+ "uri" : "http://nn.example.com:40743"
+ } ]
+ }, {
+ "api" : "classpath:org.apache.slider.management",
+ "addressType" : "uri",
+ "protocol" : "REST",
+ "addresses" : [ {
+ "uri" : "http://nn.example.com:40743/ws/v1/slider/mgmt"
+ } ]
+ }, {
+ "api" : "classpath:org.apache.slider.publisher",
+ "addressType" : "uri",
+ "protocol" : "REST",
+ "addresses" : [ {
+ "uri" : "http://nn.example.com:40743/ws/v1/slider/publisher"
+ } ]
+ }, {
+ "api" : "classpath:org.apache.slider.registry",
+ "addressType" : "uri",
+ "protocol" : "REST",
+ "addresses" : [ {
+ "uri" : "http://nn.example.com:40743/ws/v1/slider/registry"
+ } ]
+ }, {
+ "api" : "classpath:org.apache.slider.publisher.configurations",
+ "addressType" : "uri",
+ "protocol" : "REST",
+ "addresses" : [ {
+ "uri" : "http://nn.example.com:40743/ws/v1/slider/publisher/slider"
+ } ]
+ }, {
+ "api" : "classpath:org.apache.slider.publisher.exports",
+ "addressType" : "uri",
+ "protocol" : "REST",
+ "addresses" : [ {
+ "uri" : "http://nn.example.com:40743/ws/v1/slider/publisher/exports"
+ } ]
+ } ],
+ "internal" : [ {
+ "api" : "classpath:org.apache.slider.agents.secure",
+ "addressType" : "uri",
+ "protocol" : "REST",
+ "addresses" : [ {
+ "uri" : "https://nn.example.com:52705/ws/v1/slider/agents"
+ } ]
+ }, {
+ "api" : "classpath:org.apache.slider.agents.oneway",
+ "addressType" : "uri",
+ "protocol" : "REST",
+ "addresses" : [ {
+ "uri" : "https://nn.example.com:33425/ws/v1/slider/agents"
+ } ]
+ } ]
+ }
+
+It publishes a number of endpoints, both internal and external.
+
+External:
+
+1. The IPC hostname and port for client-AM communications
+1. URL to the AM's web UI
+1. A series of REST URLs under the web UI for specific application services.
+The details are irrelevant —note that they use an application-specific API
+value to ensure uniqueness.
+
+Internal:
+1. Two URLS to REST APIs offered by the AM for containers deployed by
+ the application itself.
+
+Python agents running in the containers retrieve the internal endpoint
+URLs to communicate with their AM. The record is resolved on container startup
+and cached until communications problems occur. At that point the registry is
+queried for the current record, then an attempt is made to reconnect to the AM.
+
+Here "connectivity" problems means both "low level socket/IO errors" and
+"failures in HTTPS authentication". The agents use two-way HTTPS authentication
+—if the AM fails and another application starts listening on the same ports
+it will trigger an authentication failure and hence service record reread.
|
91517fad3eae6b93ded1cb16a518b1a37ec06e5c
|
restlet-framework-java
|
Fixed potential NPE when the product name is null.- Reported by Vincent Ricard.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/Engine.java b/modules/com.noelios.restlet/src/com/noelios/restlet/Engine.java
index 99e1c21761..7dbcab4ed7 100644
--- a/modules/com.noelios.restlet/src/com/noelios/restlet/Engine.java
+++ b/modules/com.noelios.restlet/src/com/noelios/restlet/Engine.java
@@ -582,7 +582,7 @@ public String formatUserAgent(List<Product> products)
.hasNext();) {
final Product product = iterator.next();
if ((product.getName() == null)
- && (product.getName().length() == 0)) {
+ || (product.getName().length() == 0)) {
throw new IllegalArgumentException(
"Product name cannot be null.");
}
diff --git a/modules/org.restlet.gwt/src/org/restlet/gwt/internal/Engine.java b/modules/org.restlet.gwt/src/org/restlet/gwt/internal/Engine.java
index b52fa867bb..df932134a8 100644
--- a/modules/org.restlet.gwt/src/org/restlet/gwt/internal/Engine.java
+++ b/modules/org.restlet.gwt/src/org/restlet/gwt/internal/Engine.java
@@ -208,7 +208,7 @@ public String formatUserAgent(List<Product> products)
.hasNext();) {
final Product product = iterator.next();
if ((product.getName() == null)
- && (product.getName().length() == 0)) {
+ || (product.getName().length() == 0)) {
throw new IllegalArgumentException(
"Product name cannot be null.");
}
|
0dd95079938476608211f34c414b90f9eca45f77
|
camel
|
CAMEL-1712 Upgraded the camel-ibatis to JUnit4--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@785119 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/components/camel-ibatis/pom.xml b/components/camel-ibatis/pom.xml
index e512bfbff1e42..d243937f4e6e0 100644
--- a/components/camel-ibatis/pom.xml
+++ b/components/camel-ibatis/pom.xml
@@ -57,8 +57,7 @@
<!-- testing -->
<dependency>
<groupId>org.apache.camel</groupId>
- <artifactId>camel-core</artifactId>
- <type>test-jar</type>
+ <artifactId>camel-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisBatchConsumerTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisBatchConsumerTest.java
index c8869b8c8e4ea..c9ba157fd8338 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisBatchConsumerTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisBatchConsumerTest.java
@@ -19,12 +19,14 @@
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
+import org.junit.Test;
/**
* @version $Revision$
*/
public class IBatisBatchConsumerTest extends IBatisTestSupport {
+ @Test
public void testBatchConsumer() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(2);
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisPollingDelayRouteTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisPollingDelayRouteTest.java
index c6845708750d1..6bf1ef0aaf749 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisPollingDelayRouteTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisPollingDelayRouteTest.java
@@ -19,15 +19,19 @@
import java.sql.Connection;
import java.sql.Statement;
-import org.apache.camel.ContextTestSupport;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
/**
* @version $Revision$
*/
-public class IBatisPollingDelayRouteTest extends ContextTestSupport {
+public class IBatisPollingDelayRouteTest extends CamelTestSupport {
+ @Test
public void testSendAccountBean() throws Exception {
createTestData();
@@ -67,7 +71,8 @@ public void configure() throws Exception {
}
@Override
- protected void setUp() throws Exception {
+ @Before
+ public void setUp() throws Exception {
super.setUp();
// lets create the database...
@@ -78,7 +83,8 @@ protected void setUp() throws Exception {
}
@Override
- protected void tearDown() throws Exception {
+ @After
+ public void tearDown() throws Exception {
Connection connection = createConnection();
Statement statement = connection.createStatement();
statement.execute("drop table ACCOUNT");
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForDeleteTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForDeleteTest.java
index 2b2327d0d7d13..953bfb04cba74 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForDeleteTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForDeleteTest.java
@@ -18,12 +18,14 @@
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
+import org.junit.Test;
/**
* @version $Revision$
*/
public class IBatisQueryForDeleteTest extends IBatisTestSupport {
+ @Test
public void testDelete() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
@@ -42,7 +44,8 @@ public void testDelete() throws Exception {
rows = template.requestBody("ibatis:count?statementType=QueryForObject", null, Integer.class);
assertEquals("There should be 0 rows", 0, rows.intValue());
}
-
+
+ @Test
public void testDeleteNotFound() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForInsertTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForInsertTest.java
index 3a0d6e58ada09..b232df1c05cb1 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForInsertTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForInsertTest.java
@@ -18,12 +18,14 @@
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
+import org.junit.Test;
/**
* @version $Revision$
*/
public class IBatisQueryForInsertTest extends IBatisTestSupport {
+ @Test
public void testInsert() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForListTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForListTest.java
index 4286be054de30..ffdd7883105de 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForListTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForListTest.java
@@ -20,12 +20,14 @@
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
+import org.junit.Test;
/**
* @version $Revision$
*/
public class IBatisQueryForListTest extends IBatisTestSupport {
+ @Test
public void testQueryForList() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForListWithSplitTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForListWithSplitTest.java
index e6d1883088eae..c74ad19e7cd27 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForListWithSplitTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForListWithSplitTest.java
@@ -18,12 +18,14 @@
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
+import org.junit.Test;
/**
* @version $Revision$
*/
public class IBatisQueryForListWithSplitTest extends IBatisTestSupport {
+ @Test
public void testQueryForList() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(2);
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForObjectTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForObjectTest.java
index b5e8efb07fe50..8229dde693060 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForObjectTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForObjectTest.java
@@ -18,12 +18,14 @@
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
+import org.junit.Test;
/**
* @version $Revision$
*/
public class IBatisQueryForObjectTest extends IBatisTestSupport {
+ @Test
public void testQueryForObject() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
@@ -37,6 +39,7 @@ public void testQueryForObject() throws Exception {
assertEquals("Claus", account.getFirstName());
}
+ @Test
public void testQueryForNotFound() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForUpdateTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForUpdateTest.java
index a5318c6d5ab71..838c3421265e5 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForUpdateTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueryForUpdateTest.java
@@ -18,12 +18,14 @@
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
+import org.junit.Test;
/**
* @version $Revision$
*/
public class IBatisQueryForUpdateTest extends IBatisTestSupport {
+ @Test
public void testUpdate() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueueTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueueTest.java
index 89b74ccc022c1..85ed3088868e1 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueueTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisQueueTest.java
@@ -20,12 +20,16 @@
import java.sql.Statement;
import java.util.List;
-import org.apache.camel.ContextTestSupport;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
-public class IBatisQueueTest extends ContextTestSupport {
+public class IBatisQueueTest extends CamelTestSupport {
+ @Test
public void testConsume() throws Exception {
MockEndpoint endpoint = getMockEndpoint("mock:results");
@@ -76,7 +80,8 @@ public void configure() throws Exception {
}
@Override
- protected void setUp() throws Exception {
+ @Before
+ public void setUp() throws Exception {
super.setUp();
// lets create the database...
@@ -88,7 +93,8 @@ protected void setUp() throws Exception {
}
@Override
- protected void tearDown() throws Exception {
+ @After
+ public void tearDown() throws Exception {
super.tearDown();
IBatisEndpoint endpoint = resolveMandatoryEndpoint("ibatis:Account", IBatisEndpoint.class);
Connection connection = endpoint.getSqlMapClient().getDataSource().getConnection();
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisRouteEmptyResultSetTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisRouteEmptyResultSetTest.java
index 52141b52714d8..ba443f5a2327b 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisRouteEmptyResultSetTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisRouteEmptyResultSetTest.java
@@ -20,15 +20,19 @@
import java.sql.Statement;
import java.util.ArrayList;
-import org.apache.camel.ContextTestSupport;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
/**
* @version $Revision$
*/
-public class IBatisRouteEmptyResultSetTest extends ContextTestSupport {
+public class IBatisRouteEmptyResultSetTest extends CamelTestSupport {
+ @Test
public void testRouteEmptyResultSet() throws Exception {
MockEndpoint endpoint = getMockEndpoint("mock:results");
endpoint.expectedMinimumMessageCount(1);
@@ -51,7 +55,8 @@ public void configure() throws Exception {
}
@Override
- protected void setUp() throws Exception {
+ @Before
+ public void setUp() throws Exception {
super.setUp();
// lets create the database...
@@ -62,7 +67,8 @@ protected void setUp() throws Exception {
}
@Override
- protected void tearDown() throws Exception {
+ @After
+ public void tearDown() throws Exception {
Connection connection = createConnection();
Statement statement = connection.createStatement();
statement.execute("drop table ACCOUNT");
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisRouteTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisRouteTest.java
index d7591985f8683..a964ab6679ffb 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisRouteTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisRouteTest.java
@@ -20,15 +20,19 @@
import java.sql.Statement;
import java.util.List;
-import org.apache.camel.ContextTestSupport;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
/**
* @version $Revision$
*/
-public class IBatisRouteTest extends ContextTestSupport {
+public class IBatisRouteTest extends CamelTestSupport {
+ @Test
public void testSendAccountBean() throws Exception {
MockEndpoint endpoint = getMockEndpoint("mock:results");
endpoint.expectedMinimumMessageCount(1);
@@ -68,7 +72,8 @@ public void configure() throws Exception {
}
@Override
- protected void setUp() throws Exception {
+ @Before
+ public void setUp() throws Exception {
super.setUp();
// lets create the database...
@@ -79,7 +84,8 @@ protected void setUp() throws Exception {
}
@Override
- protected void tearDown() throws Exception {
+ @After
+ public void tearDown() throws Exception {
Connection connection = createConnection();
Statement statement = connection.createStatement();
statement.execute("drop table ACCOUNT");
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisTestSupport.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisTestSupport.java
index 6c0190c24ce37..518cb205dd70b 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisTestSupport.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisTestSupport.java
@@ -19,12 +19,16 @@
import java.sql.Connection;
import java.sql.Statement;
-import org.apache.camel.ContextTestSupport;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.junit.After;
+import org.junit.Before;
-public class IBatisTestSupport extends ContextTestSupport {
+
+public class IBatisTestSupport extends CamelTestSupport {
@Override
- protected void setUp() throws Exception {
+ @Before
+ public void setUp() throws Exception {
super.setUp();
// lets create the database...
@@ -50,7 +54,8 @@ protected void setUp() throws Exception {
}
@Override
- protected void tearDown() throws Exception {
+ @After
+ public void tearDown() throws Exception {
Connection connection = createConnection();
Statement statement = connection.createStatement();
statement.execute("drop table ACCOUNT");
diff --git a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisUnknownStatementTypeTest.java b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisUnknownStatementTypeTest.java
index 91f415313f91d..2e00582db406a 100644
--- a/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisUnknownStatementTypeTest.java
+++ b/components/camel-ibatis/src/test/java/org/apache/camel/component/ibatis/IBatisUnknownStatementTypeTest.java
@@ -17,15 +17,17 @@
package org.apache.camel.component.ibatis;
import org.apache.camel.CamelExecutionException;
-import org.apache.camel.ContextTestSupport;
import org.apache.camel.FailedToCreateProducerException;
import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.junit.Test;
/**
* @version $Revision$
*/
-public class IBatisUnknownStatementTypeTest extends ContextTestSupport {
+public class IBatisUnknownStatementTypeTest extends CamelTestSupport {
+ @Test
public void testStatementTypeNotSet() throws Exception {
try {
template.sendBody("direct:start", "Hello");
|
6397f775178eedf37663c0b28e863b3b74feb277
|
drools
|
JBRULES-313: adding halt command--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@12902 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
index 832c1923745..08dd4a3e144 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
@@ -2921,6 +2921,30 @@ public void testMapAccess() throws Exception {
assertTrue( list.contains( map ) );
}
+
+ public void testHalt() throws Exception {
+ final PackageBuilder builder = new PackageBuilder();
+ builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_halt.drl" ) ) );
+ final Package pkg = builder.getPackage();
+
+ final RuleBase ruleBase = getRuleBase();
+ ruleBase.addPackage( pkg );
+ final WorkingMemory workingMemory = ruleBase.newStatefulSession();
+
+ final List results = new ArrayList();
+ workingMemory.setGlobal( "results",
+ results );
+
+ workingMemory.insert( new Integer( 0 ) );
+ workingMemory.fireAllRules();
+
+ assertEquals( 10,
+ results.size() );
+ for( int i = 0; i < 10; i++ ) {
+ assertEquals( new Integer( i ), results.get( i ) );
+ }
+ }
+
}
\ No newline at end of file
diff --git a/drools-compiler/src/test/resources/org/drools/integrationtests/test_halt.drl b/drools-compiler/src/test/resources/org/drools/integrationtests/test_halt.drl
new file mode 100644
index 00000000000..6dc0e94ac06
--- /dev/null
+++ b/drools-compiler/src/test/resources/org/drools/integrationtests/test_halt.drl
@@ -0,0 +1,20 @@
+package org.drools;
+
+global java.util.List results;
+
+rule "fire"
+when
+ $old : Number( $val : intValue )
+then
+ results.add( $old );
+ insert( new Integer( $val + 1 ) );
+ retract( $old );
+end
+
+rule "stop"
+ salience 10
+when
+ Number( intValue == 10 )
+then
+ drools.halt();
+end
\ No newline at end of file
diff --git a/drools-core/src/main/java/org/drools/WorkingMemory.java b/drools-core/src/main/java/org/drools/WorkingMemory.java
index fadc9099c45..9887e14b991 100644
--- a/drools-core/src/main/java/org/drools/WorkingMemory.java
+++ b/drools-core/src/main/java/org/drools/WorkingMemory.java
@@ -311,4 +311,11 @@ public void modifyInsert(final FactHandle factHandle,
* Starts a new process instance for the process with the given id.
*/
ProcessInstance startProcess(String processId);
+
+ /**
+ * Stops rule firing after the currect rule finishes executing
+ *
+ */
+ public void halt();
+
}
\ No newline at end of file
diff --git a/drools-core/src/main/java/org/drools/base/DefaultKnowledgeHelper.java b/drools-core/src/main/java/org/drools/base/DefaultKnowledgeHelper.java
index ab4c095feef..c8a2cfa4a7a 100644
--- a/drools-core/src/main/java/org/drools/base/DefaultKnowledgeHelper.java
+++ b/drools-core/src/main/java/org/drools/base/DefaultKnowledgeHelper.java
@@ -198,4 +198,8 @@ public void setFocus(final String focus) {
public Declaration getDeclaration(final String identifier) {
return (Declaration) this.subrule.getOuterDeclarations().get( identifier );
}
+
+ public void halt() {
+ this.workingMemory.halt();
+ }
}
diff --git a/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java b/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
index 91da739c8ac..52d6cea401e 100644
--- a/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
+++ b/drools-core/src/main/java/org/drools/common/AbstractWorkingMemory.java
@@ -145,6 +145,8 @@ public abstract class AbstractWorkingMemory
/** Flag to determine if a rule is currently being fired. */
protected boolean firing;
+
+ protected boolean halt;
// ------------------------------------------------------------
// Constructors
@@ -394,6 +396,10 @@ public RuleBase getRuleBase() {
return this.ruleBase;
}
+ public void halt() {
+ this.halt = true;
+ }
+
/**
* @see WorkingMemory
*/
@@ -405,7 +411,8 @@ public synchronized void fireAllRules(final AgendaFilter agendaFilter) throws Fa
// If we're already firing a rule, then it'll pick up
// the firing for any other assertObject(..) that get
// nested inside, avoiding concurrent-modification
- // exceptions, depending on code paths of the actions.
+ // exceptions, depending on code paths of the actions.
+ this.halt = false;
if ( isSequential() ) {
for ( Iterator it = this.liaPropagations.iterator(); it.hasNext(); ) {
@@ -423,7 +430,7 @@ public synchronized void fireAllRules(final AgendaFilter agendaFilter) throws Fa
try {
this.firing = true;
- while ( this.agenda.fireNextItem( agendaFilter ) ) {
+ while ( (!halt) && this.agenda.fireNextItem( agendaFilter ) ) {
noneFired = false;
if ( !this.actionQueue.isEmpty() ) {
executeQueuedActions();
@@ -450,7 +457,7 @@ private void doOtherwise(final AgendaFilter agendaFilter) {
executeQueuedActions();
}
- while ( this.agenda.fireNextItem( agendaFilter ) ) {
+ while ( (!halt) && this.agenda.fireNextItem( agendaFilter ) ) {
;
}
diff --git a/drools-core/src/main/java/org/drools/common/InternalWorkingMemoryActions.java b/drools-core/src/main/java/org/drools/common/InternalWorkingMemoryActions.java
index 5699f9aff65..0c124843e04 100644
--- a/drools-core/src/main/java/org/drools/common/InternalWorkingMemoryActions.java
+++ b/drools-core/src/main/java/org/drools/common/InternalWorkingMemoryActions.java
@@ -52,4 +52,5 @@ public void modifyInsert(final FactHandle factHandle,
final Object object,
final Rule rule,
final Activation activation);
+
}
\ No newline at end of file
diff --git a/drools-core/src/main/java/org/drools/spi/KnowledgeHelper.java b/drools-core/src/main/java/org/drools/spi/KnowledgeHelper.java
index 250a8e1acb8..9b2711ed96a 100644
--- a/drools-core/src/main/java/org/drools/spi/KnowledgeHelper.java
+++ b/drools-core/src/main/java/org/drools/spi/KnowledgeHelper.java
@@ -17,11 +17,9 @@
*/
import java.io.Serializable;
-import java.util.List;
import org.drools.FactException;
import org.drools.FactHandle;
-import org.drools.QueryResults;
import org.drools.WorkingMemory;
import org.drools.rule.Declaration;
import org.drools.rule.Rule;
@@ -134,5 +132,7 @@ public void modifyInsert(final FactHandle factHandle,
// void setFocus(AgendaGroup focus);
public Declaration getDeclaration(String identifier);
+
+ public void halt();
}
\ No newline at end of file
|
8276661ff3a71d4fcf50f72dc377f92f17f1d889
|
intellij-community
|
Ruby test runner: Statistcis can be sort by- "Tests" column--
|
a
|
https://github.com/JetBrains/intellij-community
|
diff --git a/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/BaseColumn.java b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/BaseColumn.java
new file mode 100644
index 0000000000000..ef931199a2be3
--- /dev/null
+++ b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/BaseColumn.java
@@ -0,0 +1,34 @@
+package org.jetbrains.plugins.ruby.testing.testunit.runner.ui.statistics;
+
+import org.jetbrains.plugins.ruby.testing.testunit.runner.RTestUnitTestProxy;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import com.intellij.util.ui.ColumnInfo;
+import com.intellij.util.NullableFunction;
+
+import java.util.List;
+
+/**
+ * @author Roman Chernyatchik
+ */
+public abstract class BaseColumn extends ColumnInfo<RTestUnitTestProxy, String> {
+ private NullableFunction<List<RTestUnitTestProxy>, Object> oldSortFun =
+ new NullableFunction<List<RTestUnitTestProxy>, Object>() {
+ @Nullable
+ public Object fun(final List<RTestUnitTestProxy> proxies) {
+ BaseColumn.super.sort(proxies);
+
+ return null;
+ }
+ };
+
+ public BaseColumn(String name) {
+ super(name);
+ }
+
+ @Override
+ public void sort(@NotNull final List<RTestUnitTestProxy> rTestUnitTestProxies) {
+ //Invariant: comparator should left Total(initally at row = 0) row as uppermost element!
+ RTestUnitStatisticsTableModel.applySortOperation(rTestUnitTestProxies, oldSortFun);
+ }
+}
diff --git a/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnDuration.java b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnDuration.java
index 9b556f6a576fc..d26cb91067298 100644
--- a/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnDuration.java
+++ b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnDuration.java
@@ -2,7 +2,6 @@
import com.intellij.ui.ColoredTableCellRenderer;
import com.intellij.ui.SimpleTextAttributes;
-import com.intellij.util.ui.ColumnInfo;
import org.jetbrains.plugins.ruby.RBundle;
import org.jetbrains.plugins.ruby.testing.testunit.runner.RTestUnitTestProxy;
import org.jetbrains.plugins.ruby.testing.testunit.runner.ui.TestsPresentationUtil;
@@ -13,7 +12,7 @@
/**
* @author Roman Chernyatchik
*/
-public class ColumnDuration extends ColumnInfo<RTestUnitTestProxy, String> {
+public class ColumnDuration extends BaseColumn {
public ColumnDuration() {
super(RBundle.message("ruby.test.runner.ui.tabs.statistics.columns.duration.title"));
}
@@ -22,7 +21,16 @@ public String valueOf(final RTestUnitTestProxy testProxy) {
return TestsPresentationUtil.getDurationPresentation(testProxy);
}
- //TODO sort
+ //@Nullable
+ //public Comparator<RTestUnitTestProxy> getComparator(){
+ // return new Comparator<RTestUnitTestProxy>() {
+ // public int compare(final RTestUnitTestProxy o1, final RTestUnitTestProxy o2) {
+ // //Invariant: comparator should left Total row as uppermost element!
+ //
+ // }
+ // };
+ //}
+
@Override
public TableCellRenderer getRenderer(final RTestUnitTestProxy proxy) {
diff --git a/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnResults.java b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnResults.java
index 2eade62a0001e..7fbe23de85616 100644
--- a/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnResults.java
+++ b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnResults.java
@@ -1,7 +1,6 @@
package org.jetbrains.plugins.ruby.testing.testunit.runner.ui.statistics;
import com.intellij.ui.ColoredTableCellRenderer;
-import com.intellij.util.ui.ColumnInfo;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.plugins.ruby.RBundle;
import org.jetbrains.plugins.ruby.testing.testunit.runner.RTestUnitTestProxy;
@@ -13,7 +12,7 @@
/**
* @author Roman Chernyatchik
*/
-public class ColumnResults extends ColumnInfo<RTestUnitTestProxy, String> {
+public class ColumnResults extends BaseColumn {
@NonNls private static final String UNDERFINED = "<underfined>";
////TODO sort
diff --git a/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnTest.java b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnTest.java
index 2c994d1fd3c82..a764ba9cea3bd 100644
--- a/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnTest.java
+++ b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/ColumnTest.java
@@ -2,19 +2,20 @@
import com.intellij.ui.ColoredTableCellRenderer;
import com.intellij.ui.SimpleTextAttributes;
-import com.intellij.util.ui.ColumnInfo;
-import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.NonNls;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.ruby.RBundle;
import org.jetbrains.plugins.ruby.testing.testunit.runner.RTestUnitTestProxy;
import javax.swing.*;
import javax.swing.table.TableCellRenderer;
+import java.util.Comparator;
/**
* @author Roman Chernyatchik
*/
-public class ColumnTest extends ColumnInfo<RTestUnitTestProxy, String> {
+public class ColumnTest extends BaseColumn {
public ColumnTest() {
super(RBundle.message("ruby.test.runner.ui.tabs.statistics.columns.test.title"));
}
@@ -24,6 +25,15 @@ public String valueOf(final RTestUnitTestProxy testProxy) {
return testProxy.getPresentableName();
}
+ @Nullable
+ public Comparator<RTestUnitTestProxy> getComparator(){
+ return new Comparator<RTestUnitTestProxy>() {
+ public int compare(final RTestUnitTestProxy o1, final RTestUnitTestProxy o2) {
+ return o1.getName().compareTo(o2.getName());
+ }
+ };
+ }
+
@Override
public TableCellRenderer getRenderer(final RTestUnitTestProxy proxy) {
return new TestsCellRenderer(proxy);
diff --git a/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/RTestUnitStatisticsTableModel.java b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/RTestUnitStatisticsTableModel.java
index bdd1de2a4fccc..61784c26943d2 100644
--- a/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/RTestUnitStatisticsTableModel.java
+++ b/plugins/ruby/src/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/RTestUnitStatisticsTableModel.java
@@ -1,6 +1,8 @@
package org.jetbrains.plugins.ruby.testing.testunit.runner.ui.statistics;
+import com.intellij.openapi.diagnostic.Logger;
import com.intellij.ui.table.TableView;
+import com.intellij.util.NullableFunction;
import com.intellij.util.ui.ListTableModel;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@@ -18,8 +20,20 @@
* @author Roman Chernyatchik
*/
public class RTestUnitStatisticsTableModel extends ListTableModel<RTestUnitTestProxy> {
+ private static final Logger LOG = Logger.getInstance(RTestUnitStatisticsTableModel.class.getName());
+
private RTestUnitTestProxy myCurrentSuite;
+ private NullableFunction<List<RTestUnitTestProxy>, Object> oldReverseModelItemsFun =
+ new NullableFunction<List<RTestUnitTestProxy>, Object>() {
+ @Nullable
+ public Object fun(final List<RTestUnitTestProxy> proxies) {
+ RTestUnitStatisticsTableModel.super.reverseModelItems(proxies);
+
+ return null;
+ }
+ };
+
public RTestUnitStatisticsTableModel() {
super(new ColumnTest(), new ColumnDuration(), new ColumnResults());
}
@@ -47,6 +61,7 @@ public void onSelectedRequest(@Nullable final RTestUnitTestProxy proxy) {
private void findAndSelectInTable(final RTestUnitTestProxy proxy, final TableView<RTestUnitTestProxy> statisticsTableView) {
UIUtil.addToInvokeLater(new Runnable() {
public void run() {
+ //TODO reimplement
final String presentableName = proxy.getPresentableName();
final int rowCount = statisticsTableView.getRowCount();
final int columnnCount = statisticsTableView.getColumnCount();
@@ -61,6 +76,22 @@ public void run() {
});
}
+ // public TestProxy getTestAt(final int rowIndex) {
+ // if (rowIndex < 0 || rowIndex > getItems().size())
+ // return null;
+ // return (rowIndex == 0) ? myTest : (TestProxy)getItems().get(rowIndex - 1);
+ //}
+ //
+ //public int getIndexOf(final Object test) {
+ // if (test == myTest)
+ // return 0;
+ // for (int i = 0; i < getItems().size(); i++) {
+ // final Object child = getItems().get(i);
+ // if (child == test) return i + 1;
+ // }
+ // return -1;
+ //}
+
private void updateModel() {
UIUtil.addToInvokeLater(new Runnable() {
public void run() {
@@ -85,6 +116,32 @@ private List<RTestUnitTestProxy> getItemsForSuite(@Nullable final RTestUnitTestP
return list;
}
+ @Override
+ protected void reverseModelItems(final List<RTestUnitTestProxy> rTestUnitTestProxies) {
+ //Invariant: comparator should left Total(initally at row = 0) row as uppermost element!
+ applySortOperation(rTestUnitTestProxies, oldReverseModelItemsFun);
+ }
+
+ /**
+ * This function allow sort operation to all except first element(e.g. Total row)
+ * @param proxies Tests or suites
+ * @param sortOperation Closure
+ */
+ protected static void applySortOperation(final List<RTestUnitTestProxy> proxies,
+ final NullableFunction<List<RTestUnitTestProxy>, Object> sortOperation) {
+
+ //Invariant: comparator should left Total(initally at row = 0) row as uppermost element!
+ final int size = proxies.size();
+ if (size > 1) {
+ sortOperation.fun(proxies.subList(1, size));
+ }
+ }
+
+ public void setValueAt(final Object aValue, final int rowIndex, final int columnIndex) {
+ // Setting value is prevented!
+ LOG.assertTrue(false, "value: " + aValue + " row: " + rowIndex + " column: " + columnIndex);
+ }
+
@Nullable
private RTestUnitTestProxy getCurrentSuiteFor(@Nullable final RTestUnitTestProxy proxy) {
if (proxy == null) {
diff --git a/plugins/ruby/testSrc/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/RTestUnitStatisticsTableModelTest.java b/plugins/ruby/testSrc/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/RTestUnitStatisticsTableModelTest.java
index 3df61c2f2a3a1..48d1cc60bab16 100644
--- a/plugins/ruby/testSrc/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/RTestUnitStatisticsTableModelTest.java
+++ b/plugins/ruby/testSrc/org/jetbrains/plugins/ruby/testing/testunit/runner/ui/statistics/RTestUnitStatisticsTableModelTest.java
@@ -1,5 +1,6 @@
package org.jetbrains.plugins.ruby.testing.testunit.runner.ui.statistics;
+import com.intellij.util.ui.SortableColumnModel;
import org.jetbrains.plugins.ruby.testing.testunit.runner.BaseRUnitTestsTestCase;
import org.jetbrains.plugins.ruby.testing.testunit.runner.RTestUnitEventsListener;
import org.jetbrains.plugins.ruby.testing.testunit.runner.RTestUnitTestProxy;
@@ -227,6 +228,28 @@ public void testOnTestFinished_Other() {
assertSameElements(getItems(), myRootSuite, test1, suite);
}
+ public void testSort_ColumnTest() {
+ final RTestUnitTestProxy firstSuite = createSuiteProxy("K_suite1", myRootSuite);
+ final RTestUnitTestProxy lastSuite = createSuiteProxy("L_suite1", myRootSuite);
+ final RTestUnitTestProxy firstTest = createTestProxy("A_test", myRootSuite);
+ final RTestUnitTestProxy lastTest = createTestProxy("Z_test", myRootSuite);
+
+ mySelectionListener.onSelectedRequest(myRootSuite);
+ assertOrderedEquals(getItems(), myRootSuite, firstTest, firstSuite, lastSuite, lastTest);
+
+ //sort with another sort type
+ myStatisticsTableModel.sortByColumn(2, SortableColumnModel.SORT_ASCENDING);
+ //resort
+ myStatisticsTableModel.sortByColumn(0, SortableColumnModel.SORT_ASCENDING);
+ assertOrderedEquals(getItems(), myRootSuite, firstTest, firstSuite, lastSuite, lastTest);
+ //reverse
+ myStatisticsTableModel.sortByColumn(0, SortableColumnModel.SORT_DESCENDING);
+ assertOrderedEquals(getItems(), myRootSuite, lastTest, lastSuite, firstSuite, firstTest);
+ //direct
+ myStatisticsTableModel.sortByColumn(0, SortableColumnModel.SORT_ASCENDING);
+ assertOrderedEquals(getItems(), myRootSuite, firstTest, firstSuite, lastSuite, lastTest);
+ }
+
private List<RTestUnitTestProxy> getItems() {
return myStatisticsTableModel.getItems();
}
|
291e15e330c9b71a0d0238e9d56d4b2a473356af
|
camel
|
CAMEL-1665 fixed the unit test error--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@781238 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/test/java/org/apache/camel/processor/TransformToTest.java b/camel-core/src/test/java/org/apache/camel/processor/TransformToTest.java
index b61690f5dd763..48def13c95e83 100644
--- a/camel-core/src/test/java/org/apache/camel/processor/TransformToTest.java
+++ b/camel-core/src/test/java/org/apache/camel/processor/TransformToTest.java
@@ -52,7 +52,7 @@ public void testTransformToInvalidEndpoint() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
- from("direct:bar").transform(to("bar"));
+ from("direct:bar").transform(sendTo("bar"));
}
});
context.start();
@@ -71,7 +71,7 @@ protected RouteBuilder createRouteBuilder() throws Exception {
@Override
public void configure() throws Exception {
from("direct:start")
- .transform(to("direct:foo")).to("mock:result");
+ .transform(sendTo("direct:foo")).to("mock:result");
from("direct:foo").process(new Processor() {
public void process(Exchange exchange) throws Exception {
|
edbbc6ec7beb37bbc2d7c0773821c6b766397c2c
|
kotlin
|
Unused method removed--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/binding/CodegenBinding.java b/compiler/backend/src/org/jetbrains/jet/codegen/binding/CodegenBinding.java
index f30b166d06ef4..79e4284f5e1cc 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/binding/CodegenBinding.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/binding/CodegenBinding.java
@@ -236,7 +236,8 @@ public static void registerClassNameForScript(BindingTrace bindingTrace, @NotNul
registerClassNameForScript(bindingTrace, descriptor, className);
}
- @NotNull public static Collection<JetFile> allFilesInNamespaces(BindingContext bindingContext, Collection<JetFile> files) {
+ @NotNull
+ public static Collection<JetFile> allFilesInNamespaces(BindingContext bindingContext, Collection<JetFile> files) {
// todo: we use Set and add given files but ignoring other scripts because something non-clear kept in binding
// for scripts especially in case of REPL
@@ -275,12 +276,6 @@ public int compare(JetFile first, JetFile second) {
return sortedAnswer;
}
- public static boolean isMultiFileNamespace(BindingContext bindingContext, FqName fqName) {
- NamespaceDescriptor namespaceDescriptor = bindingContext.get(BindingContext.FQNAME_TO_NAMESPACE_DESCRIPTOR, fqName);
- Collection<JetFile> jetFiles = bindingContext.get(NAMESPACE_TO_FILES, namespaceDescriptor);
- return jetFiles != null && jetFiles.size() > 1;
- }
-
public static boolean isObjectLiteral(BindingContext bindingContext, ClassDescriptor declaration) {
PsiElement psiElement = descriptorToDeclaration(bindingContext, declaration);
if (psiElement instanceof JetObjectDeclaration && ((JetObjectDeclaration) psiElement).isObjectLiteral()) {
|
f7edaa99bc232ad970faf6369ebc3cf1f6eff70f
|
camel
|
added contains() method to the DSL for- .header("foo").contains("cheese") for multi-value headers--git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@539516 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/builder/ValueBuilder.java b/camel-core/src/main/java/org/apache/camel/builder/ValueBuilder.java
index cf8e84ec82147..4e20c97f180d9 100644
--- a/camel-core/src/main/java/org/apache/camel/builder/ValueBuilder.java
+++ b/camel-core/src/main/java/org/apache/camel/builder/ValueBuilder.java
@@ -104,6 +104,20 @@ public Predicate<E> isNotNull() {
return onNewPredicate(PredicateBuilder.isNotNull(expression));
}
+ /**
+ * Create a predicate that the left hand expression contains the value of the right hand expression
+ *
+ * @param value the element which is compared to be contained within this expression
+ * @return a predicate which evaluates to true if the given value expression is contained within this
+ * expression value
+ */
+ @Fluent
+ public Predicate<E> contains(@FluentArg("value")Object value) {
+ Expression<E> right = asExpression(value);
+ return onNewPredicate(PredicateBuilder.contains(expression, right));
+ }
+
+
/**
* Creates a predicate which is true if this expression matches the given regular expression
*
|
8210d3091f64a3de385ce6f01314322620f82735
|
kotlin
|
Initial implementation of KT-6427 Completion to- use Java name suggestion to complete function parameters (+ filtered out- synthetic Kotlin classes from completion)--
|
a
|
https://github.com/JetBrains/kotlin
|
diff --git a/core/util.runtime/src/org/jetbrains/kotlin/utils/addToStdlib.kt b/core/util.runtime/src/org/jetbrains/kotlin/utils/addToStdlib.kt
index f6a66580a18d0..a4956fc30ae7c 100644
--- a/core/util.runtime/src/org/jetbrains/kotlin/utils/addToStdlib.kt
+++ b/core/util.runtime/src/org/jetbrains/kotlin/utils/addToStdlib.kt
@@ -95,3 +95,13 @@ public fun <T : Any> constant(calculator: () -> T): T {
}
private val constantMap = ConcurrentHashMap<Function0<*>, Any>()
+
+public fun String.indexOfOrNull(char: Char, startIndex: Int = 0, ignoreCase: Boolean = false): Int? {
+ val index = indexOf(char, startIndex, ignoreCase)
+ return if (index >= 0) index else null
+}
+
+public fun String.lastIndexOfOrNull(char: Char, startIndex: Int = 0, ignoreCase: Boolean = false): Int? {
+ val index = lastIndexOf(char, startIndex, ignoreCase)
+ return if (index >= 0) index else null
+}
diff --git a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/AllClassesCompletion.kt b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/AllClassesCompletion.kt
index 5695484ee29d4..e85703308ead7 100644
--- a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/AllClassesCompletion.kt
+++ b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/AllClassesCompletion.kt
@@ -19,8 +19,10 @@ package org.jetbrains.kotlin.idea.completion
import com.intellij.codeInsight.completion.AllClassesGetter
import com.intellij.codeInsight.completion.CompletionParameters
import com.intellij.codeInsight.completion.PrefixMatcher
+import com.intellij.psi.PsiClass
import com.intellij.psi.search.GlobalSearchScope
import org.jetbrains.kotlin.asJava.KotlinLightClass
+import org.jetbrains.kotlin.descriptors.ClassDescriptor
import org.jetbrains.kotlin.descriptors.ClassKind
import org.jetbrains.kotlin.descriptors.DeclarationDescriptor
import org.jetbrains.kotlin.descriptors.ModuleDescriptor
@@ -31,34 +33,28 @@ import org.jetbrains.kotlin.platform.JavaToKotlinClassMap
import org.jetbrains.kotlin.psi.JetFile
import org.jetbrains.kotlin.resolve.BindingContext
-class AllClassesCompletion(val parameters: CompletionParameters,
- val lookupElementFactory: LookupElementFactory,
- val resolutionFacade: ResolutionFacade,
- val bindingContext: BindingContext,
- val moduleDescriptor: ModuleDescriptor,
- val scope: GlobalSearchScope,
- val prefixMatcher: PrefixMatcher,
- val kindFilter: (ClassKind) -> Boolean,
- val visibilityFilter: (DeclarationDescriptor) -> Boolean) {
- fun collect(result: LookupElementsCollector) {
+class AllClassesCompletion(private val parameters: CompletionParameters,
+ private val kotlinIndicesHelper: KotlinIndicesHelper,
+ private val prefixMatcher: PrefixMatcher,
+ private val kindFilter: (ClassKind) -> Boolean) {
+ fun collect(classDescriptorCollector: (ClassDescriptor) -> Unit, javaClassCollector: (PsiClass) -> Unit) {
//TODO: this is a temporary hack until we have built-ins in indices
val builtIns = JavaToKotlinClassMap.INSTANCE.allKotlinClasses()
val filteredBuiltIns = builtIns.filter { kindFilter(it.getKind()) && prefixMatcher.prefixMatches(it.getName().asString()) }
- result.addDescriptorElements(filteredBuiltIns, suppressAutoInsertion = true)
+ filteredBuiltIns.forEach { classDescriptorCollector(it) }
- val project = parameters.getOriginalFile().getProject()
- val helper = KotlinIndicesHelper(project, resolutionFacade, bindingContext, scope, moduleDescriptor, visibilityFilter)
- result.addDescriptorElements(helper.getClassDescriptors({ prefixMatcher.prefixMatches(it) }, kindFilter),
- suppressAutoInsertion = true)
+ kotlinIndicesHelper.getClassDescriptors({ prefixMatcher.prefixMatches(it) }, kindFilter).forEach { classDescriptorCollector(it) }
if (!ProjectStructureUtil.isJsKotlinModule(parameters.getOriginalFile() as JetFile)) {
- addAdaptedJavaCompletion(result)
+ addAdaptedJavaCompletion(javaClassCollector)
}
}
- private fun addAdaptedJavaCompletion(collector: LookupElementsCollector) {
+ private fun addAdaptedJavaCompletion(collector: (PsiClass) -> Unit) {
AllClassesGetter.processJavaClasses(parameters, prefixMatcher, true, { psiClass ->
if (psiClass!! !is KotlinLightClass) { // Kotlin class should have already been added as kotlin element before
+ if (psiClass.isSyntheticKotlinClass()) return@processJavaClasses // filter out synthetic classes produced by Kotlin compiler
+
val kind = when {
psiClass.isAnnotationType() -> ClassKind.ANNOTATION_CLASS
psiClass.isInterface() -> ClassKind.INTERFACE
@@ -66,9 +62,14 @@ class AllClassesCompletion(val parameters: CompletionParameters,
else -> ClassKind.CLASS
}
if (kindFilter(kind)) {
- collector.addElementWithAutoInsertionSuppressed(lookupElementFactory.createLookupElementForJavaClass(psiClass))
+ collector(psiClass)
}
}
})
}
+
+ private fun PsiClass.isSyntheticKotlinClass(): Boolean {
+ if (!getName().contains('$')) return false // optimization to not analyze annotations of all classes
+ return getModifierList()?.findAnnotation(javaClass<kotlin.jvm.internal.KotlinSyntheticClass>().getName()) != null
+ }
}
diff --git a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionSession.kt b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionSession.kt
index bf305d55203a0..c8f9564bb216a 100644
--- a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionSession.kt
+++ b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/CompletionSession.kt
@@ -128,7 +128,7 @@ abstract class CompletionSessionBase(protected val configuration: CompletionSess
protected val prefixMatcher: PrefixMatcher = this.resultSet.getPrefixMatcher()
- protected val referenceVariantsHelper: ReferenceVariantsHelper = ReferenceVariantsHelper(bindingContext, moduleDescriptor, project) { isVisibleDescriptor(it) }
+ protected val referenceVariantsHelper: ReferenceVariantsHelper = ReferenceVariantsHelper(bindingContext, moduleDescriptor, project, { isVisibleDescriptor(it) })
protected val receiversData: ReferenceVariantsHelper.ReceiversData? = reference?.let { referenceVariantsHelper.getReferenceVariantsReceivers(it.expression) }
@@ -168,7 +168,7 @@ abstract class CompletionSessionBase(protected val configuration: CompletionSess
}
protected val indicesHelper: KotlinIndicesHelper
- get() = KotlinIndicesHelper(project, resolutionFacade, bindingContext, searchScope, moduleDescriptor) { isVisibleDescriptor(it) }
+ get() = KotlinIndicesHelper(project, resolutionFacade, searchScope, moduleDescriptor, { isVisibleDescriptor(it) })
protected fun isVisibleDescriptor(descriptor: DeclarationDescriptor): Boolean {
if (descriptor is DeclarationDescriptorWithVisibility && inDescriptor != null) {
@@ -241,7 +241,7 @@ abstract class CompletionSessionBase(protected val configuration: CompletionSess
}
protected fun getTopLevelExtensions(): Collection<CallableDescriptor> {
- val descriptors = indicesHelper.getCallableTopLevelExtensions({ prefixMatcher.prefixMatches(it) }, reference!!.expression)
+ val descriptors = indicesHelper.getCallableTopLevelExtensions({ prefixMatcher.prefixMatches(it) }, reference!!.expression, bindingContext)
return filterShadowedNonImported(descriptors, reference)
}
@@ -250,10 +250,11 @@ abstract class CompletionSessionBase(protected val configuration: CompletionSess
}
protected fun addAllClasses(kindFilter: (ClassKind) -> Boolean) {
- AllClassesCompletion(
- parameters, lookupElementFactory, resolutionFacade, bindingContext, moduleDescriptor,
- searchScope, prefixMatcher, kindFilter, { isVisibleDescriptor(it) }
- ).collect(collector)
+ AllClassesCompletion(parameters, indicesHelper, prefixMatcher, kindFilter)
+ .collect(
+ { descriptor -> collector.addDescriptorElements(descriptor, suppressAutoInsertion = true) },
+ { javaClass -> collector.addElementWithAutoInsertionSuppressed(lookupElementFactory.createLookupElementForJavaClass(javaClass)) }
+ )
}
}
@@ -287,6 +288,11 @@ class BasicCompletionSession(configuration: CompletionSessionConfiguration,
null
}
+ private val parameterNameAndTypeCompletion = if (completionKind == CompletionKind.ANNOTATION_TYPES_OR_PARAMETER_NAME)
+ ParameterNameAndTypeCompletion(collector, lookupElementFactory, prefixMatcher)
+ else
+ null
+
private fun calcCompletionKind(): CompletionKind {
if (NamedArgumentCompletion.isOnlyNamedArgumentExpected(position)) {
return CompletionKind.NAMED_ARGUMENTS_ONLY
@@ -327,6 +333,8 @@ class BasicCompletionSession(configuration: CompletionSessionConfiguration,
if (completionKind != CompletionKind.NAMED_ARGUMENTS_ONLY) {
collector.addDescriptorElements(referenceVariants, suppressAutoInsertion = false)
+ parameterNameAndTypeCompletion?.addFromImports(reference!!.expression, bindingContext, { isVisibleDescriptor(it) })
+
val keywordsPrefix = prefix.substringBefore('@') // if there is '@' in the prefix - use shorter prefix to not loose 'this' etc
KeywordCompletion.complete(expression ?: parameters.getPosition(), keywordsPrefix) { lookupElement ->
val keyword = lookupElement.getLookupString()
@@ -405,6 +413,8 @@ class BasicCompletionSession(configuration: CompletionSessionConfiguration,
collector.addDescriptorElements(getTopLevelCallables(), suppressAutoInsertion = true)
}
}
+
+ parameterNameAndTypeCompletion?.addAll(parameters, indicesHelper)
}
}
diff --git a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/KotlinCompletionContributor.kt b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/KotlinCompletionContributor.kt
index 110e1b57854fc..deef7e1f7c996 100644
--- a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/KotlinCompletionContributor.kt
+++ b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/KotlinCompletionContributor.kt
@@ -225,11 +225,6 @@ public class KotlinCompletionContributor : CompletionContributor() {
if (parameters.getCompletionType() == CompletionType.BASIC) {
val session = BasicCompletionSession(configuration, parameters, result)
- if (session.completionKind == BasicCompletionSession.CompletionKind.ANNOTATION_TYPES_OR_PARAMETER_NAME && parameters.isAutoPopup()) {
- result.stopHere()
- return
- }
-
val somethingAdded = session.complete()
if (!somethingAdded && parameters.getInvocationCount() < 2) {
// Rerun completion if nothing was found
diff --git a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/LookupElementsCollector.kt b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/LookupElementsCollector.kt
index 3a49024f089e6..1ed9f088b3f09 100644
--- a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/LookupElementsCollector.kt
+++ b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/LookupElementsCollector.kt
@@ -69,7 +69,7 @@ class LookupElementsCollector(
}
}
- private fun addDescriptorElements(descriptor: DeclarationDescriptor, suppressAutoInsertion: Boolean, withReceiverCast: Boolean) {
+ public fun addDescriptorElements(descriptor: DeclarationDescriptor, suppressAutoInsertion: Boolean, withReceiverCast: Boolean = false) {
run {
var lookupElement = lookupElementFactory.createLookupElement(descriptor, true)
diff --git a/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/ParameterNameAndTypeCompletion.kt b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/ParameterNameAndTypeCompletion.kt
new file mode 100644
index 0000000000000..269ebb6bc9e74
--- /dev/null
+++ b/idea/idea-completion/src/org/jetbrains/kotlin/idea/completion/ParameterNameAndTypeCompletion.kt
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2010-2015 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.jetbrains.kotlin.idea.completion
+
+import com.intellij.codeInsight.completion.CompletionParameters
+import com.intellij.codeInsight.completion.InsertionContext
+import com.intellij.codeInsight.completion.PrefixMatcher
+import com.intellij.codeInsight.lookup.LookupElement
+import com.intellij.codeInsight.lookup.LookupElementDecorator
+import com.intellij.codeInsight.lookup.LookupElementPresentation
+import com.intellij.psi.PsiClass
+import com.intellij.psi.codeStyle.CodeStyleSettingsManager
+import org.jetbrains.kotlin.descriptors.ClassifierDescriptor
+import org.jetbrains.kotlin.descriptors.DeclarationDescriptor
+import org.jetbrains.kotlin.idea.core.KotlinIndicesHelper
+import org.jetbrains.kotlin.idea.core.formatter.JetCodeStyleSettings
+import org.jetbrains.kotlin.idea.core.refactoring.EmptyValidator
+import org.jetbrains.kotlin.idea.core.refactoring.JetNameSuggester
+import org.jetbrains.kotlin.psi.JetSimpleNameExpression
+import org.jetbrains.kotlin.resolve.BindingContext
+import org.jetbrains.kotlin.resolve.scopes.DescriptorKindFilter
+import org.jetbrains.kotlin.resolve.scopes.getDescriptorsFiltered
+
+class ParameterNameAndTypeCompletion(
+ private val collector: LookupElementsCollector,
+ private val lookupElementFactory: LookupElementFactory,
+ private val prefixMatcher: PrefixMatcher
+) {
+ private val modifiedPrefixMatcher = prefixMatcher.cloneWithPrefix(prefixMatcher.getPrefix().capitalize())
+
+ public fun addFromImports(nameExpression: JetSimpleNameExpression, bindingContext: BindingContext, visibilityFilter: (DeclarationDescriptor) -> Boolean) {
+ if (prefixMatcher.getPrefix().isEmpty()) return
+
+ val resolutionScope = bindingContext[BindingContext.RESOLUTION_SCOPE, nameExpression] ?: return
+ val classifiers = resolutionScope.getDescriptorsFiltered(DescriptorKindFilter.NON_SINGLETON_CLASSIFIERS, modifiedPrefixMatcher.asNameFilter())
+
+ for (classifier in classifiers) {
+ if (visibilityFilter(classifier)) {
+ addSuggestionsForClassifier(classifier)
+ }
+ }
+ }
+
+ public fun addAll(parameters: CompletionParameters, indicesHelper: KotlinIndicesHelper) {
+ if (prefixMatcher.getPrefix().isEmpty()) return
+
+ AllClassesCompletion(parameters, indicesHelper, modifiedPrefixMatcher, { !it.isSingleton() })
+ .collect({ addSuggestionsForClassifier(it) }, { addSuggestionsForJavaClass(it) })
+ }
+
+ private fun addSuggestionsForClassifier(classifier: DeclarationDescriptor) {
+ addSuggestions(classifier.getName().asString()) { name -> NameAndDescriptorType(name, classifier as ClassifierDescriptor) }
+ }
+
+ private fun addSuggestionsForJavaClass(psiClass: PsiClass) {
+ addSuggestions(psiClass.getName()) { name -> NameAndJavaType(name, psiClass) }
+ }
+
+ private inline fun addSuggestions(className: String, nameAndTypeFactory: (String) -> NameAndType) {
+ val parameterNames = JetNameSuggester.getCamelNames(className, EmptyValidator)
+ for (parameterName in parameterNames) {
+ if (prefixMatcher.prefixMatches(parameterName)) {
+ val nameAndType = nameAndTypeFactory(parameterName)
+ collector.addElement(MyLookupElement(nameAndType, lookupElementFactory))
+ }
+ }
+ }
+
+ private interface NameAndType {
+ val parameterName: String
+
+ fun createTypeLookupElement(lookupElementFactory: LookupElementFactory): LookupElement
+ }
+
+ private data class NameAndDescriptorType(override val parameterName: String, val type: ClassifierDescriptor) : NameAndType {
+ override fun createTypeLookupElement(lookupElementFactory: LookupElementFactory)
+ = lookupElementFactory.createLookupElement(type, false)
+ }
+
+ private data class NameAndJavaType(override val parameterName: String, val type: PsiClass) : NameAndType {
+ override fun createTypeLookupElement(lookupElementFactory: LookupElementFactory)
+ = lookupElementFactory.createLookupElementForJavaClass(type)
+ }
+
+ private class MyLookupElement(
+ val nameAndType: NameAndType,
+ factory: LookupElementFactory
+ ) : LookupElementDecorator<LookupElement>(nameAndType.createTypeLookupElement(factory)) {
+ override fun getObject() = nameAndType
+
+ override fun equals(other: Any?)
+ = other is MyLookupElement && nameAndType.parameterName == other.getObject().parameterName && getDelegate() == other.getDelegate()
+ override fun hashCode() = nameAndType.parameterName.hashCode()
+
+ override fun getLookupString() = nameAndType.parameterName
+ override fun getAllLookupStrings() = setOf(nameAndType.parameterName)
+
+ override fun renderElement(presentation: LookupElementPresentation) {
+ super.renderElement(presentation)
+ presentation.setItemText(nameAndType.parameterName + ": " + presentation.getItemText())
+ }
+
+ override fun handleInsert(context: InsertionContext) {
+ super.handleInsert(context)
+
+ val settings = CodeStyleSettingsManager.getInstance(context.getProject()).getCurrentSettings().getCustomSettings(javaClass<JetCodeStyleSettings>())
+ val spaceBefore = if (settings.SPACE_BEFORE_TYPE_COLON) " " else ""
+ val spaceAfter = if (settings.SPACE_AFTER_TYPE_COLON) " " else ""
+ val text = nameAndType.parameterName + spaceBefore + ":" + spaceAfter
+ context.getDocument().insertString(context.getStartOffset(), text)
+ }
+ }
+}
\ No newline at end of file
diff --git a/idea/idea-completion/testData/basic/common/annotations/NoParameterAnnotationAutoPopup1.kt b/idea/idea-completion/testData/basic/common/annotations/NoParameterAnnotationAutoPopup1.kt
deleted file mode 100644
index 30d1f85466c72..0000000000000
--- a/idea/idea-completion/testData/basic/common/annotations/NoParameterAnnotationAutoPopup1.kt
+++ /dev/null
@@ -1,4 +0,0 @@
-fun foo(i<caret>) { }
-
-// INVOCATION_COUNT: 0
-// NUMBER: 0
diff --git a/idea/idea-completion/testData/basic/common/annotations/NoParameterAnnotationAutoPopup2.kt b/idea/idea-completion/testData/basic/common/annotations/NoParameterAnnotationAutoPopup2.kt
deleted file mode 100644
index 7453355e09ab3..0000000000000
--- a/idea/idea-completion/testData/basic/common/annotations/NoParameterAnnotationAutoPopup2.kt
+++ /dev/null
@@ -1,4 +0,0 @@
-fun foo(@inlineOptions i<caret>) { }
-
-// INVOCATION_COUNT: 0
-// NUMBER: 0
diff --git a/idea/idea-completion/testData/basic/common/parameterNameAndType/NoDuplication.kt b/idea/idea-completion/testData/basic/common/parameterNameAndType/NoDuplication.kt
new file mode 100644
index 0000000000000..3162454ff168d
--- /dev/null
+++ b/idea/idea-completion/testData/basic/common/parameterNameAndType/NoDuplication.kt
@@ -0,0 +1,6 @@
+import kotlin.properties.*
+
+fun f(readOnlyProp<caret>)
+
+// EXIST: { lookupString: "readOnlyProperty", itemText: "readOnlyProperty: ReadOnlyProperty", tailText: "<R, T> (kotlin.properties)" }
+// NUMBER: 1
diff --git a/idea/idea-completion/testData/basic/common/parameterNameAndType/NoDuplicationJava.kt b/idea/idea-completion/testData/basic/common/parameterNameAndType/NoDuplicationJava.kt
new file mode 100644
index 0000000000000..5f6b9fe24f15e
--- /dev/null
+++ b/idea/idea-completion/testData/basic/common/parameterNameAndType/NoDuplicationJava.kt
@@ -0,0 +1,6 @@
+import java.io.*
+
+fun f(printSt<caret>)
+
+// EXIST_JAVA_ONLY: { lookupString: "printStream", itemText: "printStream: PrintStream", tailText: " (java.io)" }
+// NUMBER_JAVA: 1
diff --git a/idea/idea-completion/testData/basic/common/parameterNameAndType/NotImported.kt b/idea/idea-completion/testData/basic/common/parameterNameAndType/NotImported.kt
new file mode 100644
index 0000000000000..181fb9881d1e2
--- /dev/null
+++ b/idea/idea-completion/testData/basic/common/parameterNameAndType/NotImported.kt
@@ -0,0 +1,3 @@
+fun f(read<caret>)
+
+// EXIST: { lookupString: "readOnlyProperty", itemText: "readOnlyProperty: ReadOnlyProperty", tailText: "<R, T> (kotlin.properties)" }
diff --git a/idea/idea-completion/testData/basic/common/parameterNameAndType/NotImportedJava.kt b/idea/idea-completion/testData/basic/common/parameterNameAndType/NotImportedJava.kt
new file mode 100644
index 0000000000000..3f30d7f894023
--- /dev/null
+++ b/idea/idea-completion/testData/basic/common/parameterNameAndType/NotImportedJava.kt
@@ -0,0 +1,3 @@
+fun f(file<caret>)
+
+// EXIST_JAVA_ONLY: { lookupString: "file", itemText: "file: File", tailText: " (java.io)" }
diff --git a/idea/idea-completion/testData/basic/common/parameterNameAndType/Simple.kt b/idea/idea-completion/testData/basic/common/parameterNameAndType/Simple.kt
new file mode 100644
index 0000000000000..ee9e2ceaf5fe4
--- /dev/null
+++ b/idea/idea-completion/testData/basic/common/parameterNameAndType/Simple.kt
@@ -0,0 +1,11 @@
+package pack
+
+class FooBar
+
+class Boo
+
+fun f(b<caret>)
+
+// EXIST: { lookupString: "bar", itemText: "bar: FooBar", tailText: " (pack)" }
+// EXIST: { lookupString: "fooBar", itemText: "fooBar: FooBar", tailText: " (pack)" }
+// EXIST: { lookupString: "boo", itemText: "boo: Boo", tailText: " (pack)" }
diff --git a/idea/idea-completion/testData/basic/java/NoSyntheticClasses.kt b/idea/idea-completion/testData/basic/java/NoSyntheticClasses.kt
new file mode 100644
index 0000000000000..2db57b5278471
--- /dev/null
+++ b/idea/idea-completion/testData/basic/java/NoSyntheticClasses.kt
@@ -0,0 +1,8 @@
+import kotlin.properties.*
+
+val x: ReadOnlyPr<caret>
+
+// INVOCATION_COUNT: 2
+// EXIST: "ReadOnlyProperty"
+// ABSENT: "ReadOnlyProperty$$TImpl"
+// NOTHING_ELSE
diff --git a/idea/idea-completion/testData/handlers/basic/highOrderFunctions/FunctionLiteralInsertWhenNoSpacesForBraces.kt b/idea/idea-completion/testData/handlers/basic/highOrderFunctions/FunctionLiteralInsertWhenNoSpacesForBraces.kt
index 701f67f739a71..7b6ee42f6f418 100644
--- a/idea/idea-completion/testData/handlers/basic/highOrderFunctions/FunctionLiteralInsertWhenNoSpacesForBraces.kt
+++ b/idea/idea-completion/testData/handlers/basic/highOrderFunctions/FunctionLiteralInsertWhenNoSpacesForBraces.kt
@@ -1,4 +1,4 @@
-// INSERT_WHITESPACES_IN_SIMPLE_ONE_LINE_METHOD: false
+// CODE_STYLE_SETTING: INSERT_WHITESPACES_IN_SIMPLE_ONE_LINE_METHOD = false
fun main(args: Array<String>) {
args.fil<caret>
diff --git a/idea/idea-completion/testData/handlers/basic/highOrderFunctions/FunctionLiteralInsertWhenNoSpacesForBraces.kt.after b/idea/idea-completion/testData/handlers/basic/highOrderFunctions/FunctionLiteralInsertWhenNoSpacesForBraces.kt.after
index 660ad3bff5757..c2441ce4a617a 100644
--- a/idea/idea-completion/testData/handlers/basic/highOrderFunctions/FunctionLiteralInsertWhenNoSpacesForBraces.kt.after
+++ b/idea/idea-completion/testData/handlers/basic/highOrderFunctions/FunctionLiteralInsertWhenNoSpacesForBraces.kt.after
@@ -1,4 +1,4 @@
-// INSERT_WHITESPACES_IN_SIMPLE_ONE_LINE_METHOD: false
+// CODE_STYLE_SETTING: INSERT_WHITESPACES_IN_SIMPLE_ONE_LINE_METHOD = false
fun main(args: Array<String>) {
args.filter {<caret>}
diff --git a/idea/idea-completion/testData/handlers/basic/parameterNameAndType/CodeStyleSettings.kt b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/CodeStyleSettings.kt
new file mode 100644
index 0000000000000..0220ca6e71761
--- /dev/null
+++ b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/CodeStyleSettings.kt
@@ -0,0 +1,8 @@
+// CODE_STYLE_SETTING: SPACE_BEFORE_TYPE_COLON = true
+// CODE_STYLE_SETTING: SPACE_AFTER_TYPE_COLON = false
+
+class FooBar
+
+fun f(b<caret>)
+
+// ELEMENT: bar
diff --git a/idea/idea-completion/testData/handlers/basic/parameterNameAndType/CodeStyleSettings.kt.after b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/CodeStyleSettings.kt.after
new file mode 100644
index 0000000000000..4a35270257d5d
--- /dev/null
+++ b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/CodeStyleSettings.kt.after
@@ -0,0 +1,8 @@
+// CODE_STYLE_SETTING: SPACE_BEFORE_TYPE_COLON = true
+// CODE_STYLE_SETTING: SPACE_AFTER_TYPE_COLON = false
+
+class FooBar
+
+fun f(bar :FooBar<caret>)
+
+// ELEMENT: bar
diff --git a/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Comma.kt b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Comma.kt
new file mode 100644
index 0000000000000..21825a1893994
--- /dev/null
+++ b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Comma.kt
@@ -0,0 +1,6 @@
+class FooBar
+
+fun f(b<caret>)
+
+// ELEMENT: bar
+// CHAR: ','
diff --git a/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Comma.kt.after b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Comma.kt.after
new file mode 100644
index 0000000000000..e2fc7d0a67e0a
--- /dev/null
+++ b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Comma.kt.after
@@ -0,0 +1,6 @@
+class FooBar
+
+fun f(bar: FooBar, <caret>)
+
+// ELEMENT: bar
+// CHAR: ','
diff --git a/idea/idea-completion/testData/handlers/basic/parameterNameAndType/InsertImport.kt b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/InsertImport.kt
new file mode 100644
index 0000000000000..c44f6430b18a3
--- /dev/null
+++ b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/InsertImport.kt
@@ -0,0 +1,3 @@
+fun f(file<caret>)
+
+// ELEMENT_TEXT: "file: File"
diff --git a/idea/idea-completion/testData/handlers/basic/parameterNameAndType/InsertImport.kt.after b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/InsertImport.kt.after
new file mode 100644
index 0000000000000..cf38dafd0378e
--- /dev/null
+++ b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/InsertImport.kt.after
@@ -0,0 +1,5 @@
+import java.io.File
+
+fun f(file: File<caret>)
+
+// ELEMENT_TEXT: "file: File"
diff --git a/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Simple.kt b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Simple.kt
new file mode 100644
index 0000000000000..77b3bbb40823d
--- /dev/null
+++ b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Simple.kt
@@ -0,0 +1,5 @@
+class FooBar
+
+fun f(b<caret>)
+
+// ELEMENT: bar
diff --git a/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Simple.kt.after b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Simple.kt.after
new file mode 100644
index 0000000000000..5f3ad6bd6c53c
--- /dev/null
+++ b/idea/idea-completion/testData/handlers/basic/parameterNameAndType/Simple.kt.after
@@ -0,0 +1,5 @@
+class FooBar
+
+fun f(bar: FooBar<caret>)
+
+// ELEMENT: bar
diff --git a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JSBasicCompletionTestGenerated.java b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JSBasicCompletionTestGenerated.java
index 479349908c5b7..7337e37100dbd 100644
--- a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JSBasicCompletionTestGenerated.java
+++ b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JSBasicCompletionTestGenerated.java
@@ -1053,18 +1053,6 @@ public void testFunctionAnnotation2() throws Exception {
doTest(fileName);
}
- @TestMetadata("NoParameterAnnotationAutoPopup1.kt")
- public void testNoParameterAnnotationAutoPopup1() throws Exception {
- String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/annotations/NoParameterAnnotationAutoPopup1.kt");
- doTest(fileName);
- }
-
- @TestMetadata("NoParameterAnnotationAutoPopup2.kt")
- public void testNoParameterAnnotationAutoPopup2() throws Exception {
- String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/annotations/NoParameterAnnotationAutoPopup2.kt");
- doTest(fileName);
- }
-
@TestMetadata("ParameterAnnotation1.kt")
public void testParameterAnnotation1() throws Exception {
String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/annotations/ParameterAnnotation1.kt");
@@ -1402,6 +1390,45 @@ public void testWithParameterExpression() throws Exception {
}
}
+ @TestMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType")
+ @TestDataPath("$PROJECT_ROOT")
+ @RunWith(JUnit3RunnerWithInners.class)
+ public static class ParameterNameAndType extends AbstractJSBasicCompletionTest {
+ public void testAllFilesPresentInParameterNameAndType() throws Exception {
+ JetTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("idea/idea-completion/testData/basic/common/parameterNameAndType"), Pattern.compile("^(.+)\\.kt$"), true);
+ }
+
+ @TestMetadata("NoDuplication.kt")
+ public void testNoDuplication() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType/NoDuplication.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("NoDuplicationJava.kt")
+ public void testNoDuplicationJava() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType/NoDuplicationJava.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("NotImported.kt")
+ public void testNotImported() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType/NotImported.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("NotImportedJava.kt")
+ public void testNotImportedJava() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType/NotImportedJava.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("Simple.kt")
+ public void testSimple() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType/Simple.kt");
+ doTest(fileName);
+ }
+ }
+
@TestMetadata("idea/idea-completion/testData/basic/common/shadowing")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
diff --git a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmBasicCompletionTestGenerated.java b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmBasicCompletionTestGenerated.java
index 3b103f0f601ee..33b3cd9b38799 100644
--- a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmBasicCompletionTestGenerated.java
+++ b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/JvmBasicCompletionTestGenerated.java
@@ -1053,18 +1053,6 @@ public void testFunctionAnnotation2() throws Exception {
doTest(fileName);
}
- @TestMetadata("NoParameterAnnotationAutoPopup1.kt")
- public void testNoParameterAnnotationAutoPopup1() throws Exception {
- String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/annotations/NoParameterAnnotationAutoPopup1.kt");
- doTest(fileName);
- }
-
- @TestMetadata("NoParameterAnnotationAutoPopup2.kt")
- public void testNoParameterAnnotationAutoPopup2() throws Exception {
- String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/annotations/NoParameterAnnotationAutoPopup2.kt");
- doTest(fileName);
- }
-
@TestMetadata("ParameterAnnotation1.kt")
public void testParameterAnnotation1() throws Exception {
String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/annotations/ParameterAnnotation1.kt");
@@ -1402,6 +1390,45 @@ public void testWithParameterExpression() throws Exception {
}
}
+ @TestMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType")
+ @TestDataPath("$PROJECT_ROOT")
+ @RunWith(JUnit3RunnerWithInners.class)
+ public static class ParameterNameAndType extends AbstractJvmBasicCompletionTest {
+ public void testAllFilesPresentInParameterNameAndType() throws Exception {
+ JetTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("idea/idea-completion/testData/basic/common/parameterNameAndType"), Pattern.compile("^(.+)\\.kt$"), true);
+ }
+
+ @TestMetadata("NoDuplication.kt")
+ public void testNoDuplication() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType/NoDuplication.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("NoDuplicationJava.kt")
+ public void testNoDuplicationJava() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType/NoDuplicationJava.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("NotImported.kt")
+ public void testNotImported() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType/NotImported.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("NotImportedJava.kt")
+ public void testNotImportedJava() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType/NotImportedJava.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("Simple.kt")
+ public void testSimple() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/common/parameterNameAndType/Simple.kt");
+ doTest(fileName);
+ }
+ }
+
@TestMetadata("idea/idea-completion/testData/basic/common/shadowing")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
@@ -1696,6 +1723,12 @@ public void testNoDuplicationForRuntimeClass() throws Exception {
doTest(fileName);
}
+ @TestMetadata("NoSyntheticClasses.kt")
+ public void testNoSyntheticClasses() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/java/NoSyntheticClasses.kt");
+ doTest(fileName);
+ }
+
@TestMetadata("PackageDirective.kt")
public void testPackageDirective() throws Exception {
String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/basic/java/PackageDirective.kt");
diff --git a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/AbstractCompletionHandlerTests.kt b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/AbstractCompletionHandlerTests.kt
index 9f6265c08b0d7..56531b30b42c5 100644
--- a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/AbstractCompletionHandlerTests.kt
+++ b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/AbstractCompletionHandlerTests.kt
@@ -22,6 +22,7 @@ import com.intellij.psi.codeStyle.CodeStyleSettingsManager
import org.jetbrains.kotlin.idea.core.formatter.JetCodeStyleSettings
import org.jetbrains.kotlin.idea.test.JetWithJdkAndRuntimeLightProjectDescriptor
import org.jetbrains.kotlin.test.InTextDirectivesUtils
+import org.jetbrains.kotlin.utils.addToStdlib.indexOfOrNull
import java.io.File
public abstract class AbstractCompletionHandlerTest(private val defaultCompletionType: CompletionType) : CompletionHandlerTestBase() {
@@ -31,7 +32,7 @@ public abstract class AbstractCompletionHandlerTest(private val defaultCompletio
private val TAIL_TEXT_PREFIX = "TAIL_TEXT:"
private val COMPLETION_CHAR_PREFIX = "CHAR:"
private val COMPLETION_TYPE_PREFIX = "COMPLETION_TYPE:"
- private val INSERT_WHITESPACES_IN_SIMPLE_ONE_LINE_METHOD = "INSERT_WHITESPACES_IN_SIMPLE_ONE_LINE_METHOD:"
+ private val CODE_STYLE_SETTING_PREFIX = "CODE_STYLE_SETTING:"
protected open fun doTest(testPath: String) {
setUpFixture(testPath)
@@ -61,8 +62,17 @@ public abstract class AbstractCompletionHandlerTest(private val defaultCompletio
else -> error("Unknown completion type: $completionTypeString")
}
- InTextDirectivesUtils.getPrefixedBoolean(fileText, INSERT_WHITESPACES_IN_SIMPLE_ONE_LINE_METHOD)?.let {
- JetCodeStyleSettings.getInstance(getProject()).INSERT_WHITESPACES_IN_SIMPLE_ONE_LINE_METHOD = it
+ val codeStyleSettings = JetCodeStyleSettings.getInstance(getProject())
+ for (line in InTextDirectivesUtils.findLinesWithPrefixesRemoved(fileText, CODE_STYLE_SETTING_PREFIX)) {
+ val index = line.indexOfOrNull('=') ?: error("Invalid code style setting '$line': '=' expected")
+ val settingName = line.substring(0, index).trim()
+ val settingValue = line.substring(index + 1).trim()
+ val field = codeStyleSettings.javaClass.getDeclaredField(settingName)
+ when (field.getType().getName()) {
+ "boolean" -> field.setBoolean(codeStyleSettings, settingValue.toBoolean())
+ "int" -> field.setInt(codeStyleSettings, settingValue.toInt())
+ else -> error("Unsupported setting type: ${field.getType()}")
+ }
}
doTestWithTextLoaded(completionType, invocationCount, lookupString, itemText, tailText, completionChar, testPath + ".after")
diff --git a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/BasicCompletionHandlerTestGenerated.java b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/BasicCompletionHandlerTestGenerated.java
index 9fdb38f9459f7..5740ee686ab66 100644
--- a/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/BasicCompletionHandlerTestGenerated.java
+++ b/idea/idea-completion/tests/org/jetbrains/kotlin/idea/completion/test/handlers/BasicCompletionHandlerTestGenerated.java
@@ -239,6 +239,39 @@ public void testWithArgsNonEmptyLambdaAfter() throws Exception {
}
}
+ @TestMetadata("idea/idea-completion/testData/handlers/basic/parameterNameAndType")
+ @TestDataPath("$PROJECT_ROOT")
+ @RunWith(JUnit3RunnerWithInners.class)
+ public static class ParameterNameAndType extends AbstractBasicCompletionHandlerTest {
+ public void testAllFilesPresentInParameterNameAndType() throws Exception {
+ JetTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("idea/idea-completion/testData/handlers/basic/parameterNameAndType"), Pattern.compile("^(.+)\\.kt$"), true);
+ }
+
+ @TestMetadata("CodeStyleSettings.kt")
+ public void testCodeStyleSettings() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/handlers/basic/parameterNameAndType/CodeStyleSettings.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("Comma.kt")
+ public void testComma() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/handlers/basic/parameterNameAndType/Comma.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("InsertImport.kt")
+ public void testInsertImport() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/handlers/basic/parameterNameAndType/InsertImport.kt");
+ doTest(fileName);
+ }
+
+ @TestMetadata("Simple.kt")
+ public void testSimple() throws Exception {
+ String fileName = JetTestUtils.navigationMetadata("idea/idea-completion/testData/handlers/basic/parameterNameAndType/Simple.kt");
+ doTest(fileName);
+ }
+ }
+
@TestMetadata("idea/idea-completion/testData/handlers/basic/stringTemplate")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
diff --git a/idea/idea-core/src/org/jetbrains/kotlin/idea/core/KotlinIndicesHelper.kt b/idea/idea-core/src/org/jetbrains/kotlin/idea/core/KotlinIndicesHelper.kt
index 52f238ca2eff4..f66331389cd58 100644
--- a/idea/idea-core/src/org/jetbrains/kotlin/idea/core/KotlinIndicesHelper.kt
+++ b/idea/idea-core/src/org/jetbrains/kotlin/idea/core/KotlinIndicesHelper.kt
@@ -46,7 +46,6 @@ import java.util.LinkedHashSet
public class KotlinIndicesHelper(
private val project: Project,
private val resolutionFacade: ResolutionFacade,
- private val bindingContext: BindingContext,
private val scope: GlobalSearchScope,
private val moduleDescriptor: ModuleDescriptor,
private val visibilityFilter: (DeclarationDescriptor) -> Boolean
@@ -73,36 +72,36 @@ public class KotlinIndicesHelper(
}
public fun getTopLevelCallables(nameFilter: (String) -> Boolean): Collection<CallableDescriptor> {
- return (JetTopLevelFunctionFqnNameIndex.getInstance().getAllKeys(project).sequence() +
- JetTopLevelPropertyFqnNameIndex.getInstance().getAllKeys(project).sequence())
+ return (JetTopLevelFunctionFqnNameIndex.getInstance().getAllKeys(project).asSequence() +
+ JetTopLevelPropertyFqnNameIndex.getInstance().getAllKeys(project).asSequence())
.map { FqName(it) }
.filter { nameFilter(it.shortName().asString()) }
.toSet()
.flatMap { findTopLevelCallables(it).filter(visibilityFilter) }
}
- public fun getCallableTopLevelExtensions(nameFilter: (String) -> Boolean, expression: JetSimpleNameExpression): Collection<CallableDescriptor> {
- val receiverValues = receiverValues(expression)
+ public fun getCallableTopLevelExtensions(nameFilter: (String) -> Boolean, expression: JetSimpleNameExpression, bindingContext: BindingContext): Collection<CallableDescriptor> {
+ val receiverValues = receiverValues(expression, bindingContext)
if (receiverValues.isEmpty()) return emptyList()
val dataFlowInfo = bindingContext.getDataFlowInfo(expression)
- val receiverTypeNames = possibleReceiverTypeNames(receiverValues.map { it.first }, dataFlowInfo)
+ val receiverTypeNames = possibleReceiverTypeNames(receiverValues.map { it.first }, dataFlowInfo, bindingContext)
val index = JetTopLevelExtensionsByReceiverTypeIndex.INSTANCE
val declarations = index.getAllKeys(project)
- .sequence()
+ .asSequence()
.filter {
JetTopLevelExtensionsByReceiverTypeIndex.receiverTypeNameFromKey(it) in receiverTypeNames
&& nameFilter(JetTopLevelExtensionsByReceiverTypeIndex.callableNameFromKey(it))
}
- .flatMap { index.get(it, project, scope).sequence() }
+ .flatMap { index.get(it, project, scope).asSequence() }
return findSuitableExtensions(declarations, receiverValues, dataFlowInfo, bindingContext)
}
- private fun possibleReceiverTypeNames(receiverValues: Collection<ReceiverValue>, dataFlowInfo: DataFlowInfo): Set<String> {
+ private fun possibleReceiverTypeNames(receiverValues: Collection<ReceiverValue>, dataFlowInfo: DataFlowInfo, bindingContext: BindingContext): Set<String> {
val result = HashSet<String>()
for (receiverValue in receiverValues) {
for (type in SmartCastUtils.getSmartCastVariants(receiverValue, bindingContext, moduleDescriptor, dataFlowInfo)) {
@@ -118,7 +117,7 @@ public class KotlinIndicesHelper(
constructor.getSupertypes().forEach { addTypeNames(it) }
}
- private fun receiverValues(expression: JetSimpleNameExpression): Collection<Pair<ReceiverValue, CallType>> {
+ private fun receiverValues(expression: JetSimpleNameExpression, bindingContext: BindingContext): Collection<Pair<ReceiverValue, CallType>> {
val receiverPair = ReferenceVariantsHelper.getExplicitReceiverData(expression)
if (receiverPair != null) {
val (receiverExpression, callType) = receiverPair
@@ -173,7 +172,7 @@ public class KotlinIndicesHelper(
}
public fun getClassDescriptors(nameFilter: (String) -> Boolean, kindFilter: (ClassKind) -> Boolean): Collection<ClassDescriptor> {
- return JetFullClassNameIndex.getInstance().getAllKeys(project).sequence()
+ return JetFullClassNameIndex.getInstance().getAllKeys(project).asSequence()
.map { FqName(it) }
.filter { nameFilter(it.shortName().asString()) }
.toList()
diff --git a/idea/idea-core/src/org/jetbrains/kotlin/idea/core/refactoring/JetNameSuggester.java b/idea/idea-core/src/org/jetbrains/kotlin/idea/core/refactoring/JetNameSuggester.java
index cbdc3957abcad..66acdc5e5eccb 100644
--- a/idea/idea-core/src/org/jetbrains/kotlin/idea/core/refactoring/JetNameSuggester.java
+++ b/idea/idea-core/src/org/jetbrains/kotlin/idea/core/refactoring/JetNameSuggester.java
@@ -37,6 +37,7 @@
import org.jetbrains.kotlin.types.checker.JetTypeChecker;
import java.util.ArrayList;
+import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -198,6 +199,12 @@ private static void addForClassType(ArrayList<String> result, JetType jetType, J
private static final String[] ACCESSOR_PREFIXES = { "get", "is", "set" };
+ public static List<String> getCamelNames(String name, JetNameValidator validator) {
+ ArrayList<String> result = new ArrayList<String>();
+ addCamelNames(result, name, validator);
+ return result;
+ }
+
private static void addCamelNames(ArrayList<String> result, String name, JetNameValidator validator) {
if (name == "") return;
String s = deleteNonLetterFromString(name);
diff --git a/idea/src/org/jetbrains/kotlin/idea/quickfix/AutoImportFix.kt b/idea/src/org/jetbrains/kotlin/idea/quickfix/AutoImportFix.kt
index c528b5863ca32..d9ae0e1215a25 100644
--- a/idea/src/org/jetbrains/kotlin/idea/quickfix/AutoImportFix.kt
+++ b/idea/src/org/jetbrains/kotlin/idea/quickfix/AutoImportFix.kt
@@ -138,7 +138,7 @@ public class AutoImportFix(element: JetSimpleNameExpression) : JetHintAction<Jet
val result = ArrayList<DeclarationDescriptor>()
val moduleDescriptor = resolutionFacade.findModuleDescriptor(element)
- val indicesHelper = KotlinIndicesHelper(file.getProject(), resolutionFacade, bindingContext, searchScope, moduleDescriptor, ::isVisible)
+ val indicesHelper = KotlinIndicesHelper(file.getProject(), resolutionFacade, searchScope, moduleDescriptor, ::isVisible)
if (!element.isImportDirectiveExpression() && !JetPsiUtil.isSelectorInQualified(element)) {
if (ProjectStructureUtil.isJsKotlinModule(file)) {
@@ -150,7 +150,7 @@ public class AutoImportFix(element: JetSimpleNameExpression) : JetHintAction<Jet
result.addAll(indicesHelper.getTopLevelCallablesByName(referenceName))
}
- result.addAll(indicesHelper.getCallableTopLevelExtensions({ it == referenceName }, element))
+ result.addAll(indicesHelper.getCallableTopLevelExtensions({ it == referenceName }, element, bindingContext))
return result
}
|
b6277ac14d5c35c8f5c80616829868c4ea74abfa
|
elasticsearch
|
increase netty worker count to default to the- updated value netty defaults to--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java
index a94c06ea6293a..1b9535487c5fb 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java
@@ -106,7 +106,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
super(settings);
this.networkService = networkService;
ByteSizeValue maxContentLength = componentSettings.getAsBytesSize("max_content_length", settings.getAsBytesSize("http.max_content_length", new ByteSizeValue(100, ByteSizeUnit.MB)));
- this.workerCount = componentSettings.getAsInt("worker_count", Runtime.getRuntime().availableProcessors());
+ this.workerCount = componentSettings.getAsInt("worker_count", Runtime.getRuntime().availableProcessors() * 2);
this.blockingServer = settings.getAsBoolean("http.blocking_server", settings.getAsBoolean(TCP_BLOCKING_SERVER, settings.getAsBoolean(TCP_BLOCKING, false)));
this.port = componentSettings.get("port", settings.get("http.port", "9200-9300"));
this.bindHost = componentSettings.get("bind_host");
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java
index 908dcd391fb21..10dd931032cea 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java
@@ -143,7 +143,7 @@ public NettyTransport(Settings settings, ThreadPool threadPool) {
this.threadPool = threadPool;
this.networkService = networkService;
- this.workerCount = componentSettings.getAsInt("worker_count", Runtime.getRuntime().availableProcessors());
+ this.workerCount = componentSettings.getAsInt("worker_count", Runtime.getRuntime().availableProcessors() * 2);
this.blockingServer = settings.getAsBoolean("transport.tcp.blocking_server", settings.getAsBoolean(TCP_BLOCKING_SERVER, settings.getAsBoolean(TCP_BLOCKING, false)));
this.blockingClient = settings.getAsBoolean("transport.tcp.blocking_client", settings.getAsBoolean(TCP_BLOCKING_CLIENT, settings.getAsBoolean(TCP_BLOCKING, false)));
this.port = componentSettings.get("port", settings.get("transport.tcp.port", "9300-9400"));
diff --git a/plugins/transport/memcached/src/main/java/org/elasticsearch/memcached/netty/NettyMemcachedServerTransport.java b/plugins/transport/memcached/src/main/java/org/elasticsearch/memcached/netty/NettyMemcachedServerTransport.java
index a14485a57fc1c..c4f5f5c5cbb8e 100644
--- a/plugins/transport/memcached/src/main/java/org/elasticsearch/memcached/netty/NettyMemcachedServerTransport.java
+++ b/plugins/transport/memcached/src/main/java/org/elasticsearch/memcached/netty/NettyMemcachedServerTransport.java
@@ -93,7 +93,7 @@ public class NettyMemcachedServerTransport extends AbstractLifecycleComponent<Me
this.restController = restController;
this.networkService = networkService;
- this.workerCount = componentSettings.getAsInt("worker_count", Runtime.getRuntime().availableProcessors());
+ this.workerCount = componentSettings.getAsInt("worker_count", Runtime.getRuntime().availableProcessors() * 2);
this.blockingServer = componentSettings.getAsBoolean("memcached.blocking_server", settings.getAsBoolean(TCP_BLOCKING_SERVER, settings.getAsBoolean(TCP_BLOCKING, false)));
this.port = componentSettings.get("port", settings.get("memcached.port", "11211-11311"));
this.bindHost = componentSettings.get("bind_host");
|
e4671f5ff0a01bd22e6d21f666188380646821c2
|
camel
|
CAMEL-751 Get the CamelContext ErrorHandler to- work--git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@679345 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/CamelContext.java b/camel-core/src/main/java/org/apache/camel/CamelContext.java
index f248808bbbe77..17d31c112e246 100644
--- a/camel-core/src/main/java/org/apache/camel/CamelContext.java
+++ b/camel-core/src/main/java/org/apache/camel/CamelContext.java
@@ -208,10 +208,10 @@ public interface CamelContext extends Service {
/**
* Gets the default error handler builder which is inherited by the routes
*/
- public ErrorHandlerBuilder getErrorHandlerBuilder();
+ ErrorHandlerBuilder getErrorHandlerBuilder();
/**
* Sets the default error handler builder which is inherited by the routes
*/
- public void setErrorHandlerBuilder(ErrorHandlerBuilder errorHandlerBuilder);
+ void setErrorHandlerBuilder(ErrorHandlerBuilder errorHandlerBuilder);
}
diff --git a/camel-core/src/main/java/org/apache/camel/builder/BuilderSupport.java b/camel-core/src/main/java/org/apache/camel/builder/BuilderSupport.java
index 9524700335890..21a5592c1665f 100644
--- a/camel-core/src/main/java/org/apache/camel/builder/BuilderSupport.java
+++ b/camel-core/src/main/java/org/apache/camel/builder/BuilderSupport.java
@@ -247,11 +247,7 @@ public ErrorHandlerBuilder getErrorHandlerBuilder() {
protected ErrorHandlerBuilder createErrorHandlerBuilder() {
if (isInheritErrorHandler()) {
- ErrorHandlerBuilder errorHandler= context.getErrorHandlerBuilder();
- if (errorHandler == null) {
- errorHandler = new DeadLetterChannelBuilder();
- }
- return errorHandler;
+ return new DeadLetterChannelBuilder();
} else {
return new NoErrorHandlerBuilder();
}
diff --git a/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java b/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java
index d188e8360436c..7b690fbab1a12 100644
--- a/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java
+++ b/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java
@@ -173,6 +173,11 @@ public void setErrorHandlerBuilder(ErrorHandlerBuilder errorHandlerBuilder) {
// -----------------------------------------------------------------------
protected void checkInitialized() throws Exception {
if (initialized.compareAndSet(false, true)) {
+ // Set the CamelContext ErrorHandler here
+ CamelContext camelContext = getContext();
+ if (camelContext.getErrorHandlerBuilder() != null) {
+ setErrorHandlerBuilder(camelContext.getErrorHandlerBuilder());
+ }
configure();
populateRoutes(routes);
}
@@ -194,21 +199,21 @@ public void setRouteCollection(RoutesType routeCollection) {
public RoutesType getRouteCollection() {
return this.routeCollection;
}
-
+
/**
- * Completely disable stream caching for all routes being defined in the same RouteBuilder after this.
+ * Completely disable stream caching for all routes being defined in the same RouteBuilder after this.
*/
public void noStreamCaching() {
StreamCachingInterceptor.noStreamCaching(routeCollection.getInterceptors());
}
-
+
/**
* Enable stream caching for all routes being defined in the same RouteBuilder after this call.
*/
public void streamCaching() {
routeCollection.intercept(new StreamCachingInterceptor());
}
-
+
/**
* Factory method
*/
diff --git a/camel-core/src/main/java/org/apache/camel/builder/xml/MessageVariableResolver.java b/camel-core/src/main/java/org/apache/camel/builder/xml/MessageVariableResolver.java
index b56a358323ec2..b7ab43dd60bbc 100644
--- a/camel-core/src/main/java/org/apache/camel/builder/xml/MessageVariableResolver.java
+++ b/camel-core/src/main/java/org/apache/camel/builder/xml/MessageVariableResolver.java
@@ -37,7 +37,7 @@
* A variable resolver for XPath expressions which support properties on the
* messge, exchange as well as making system properties and environment
* properties available.
- *
+ *
* @version $Revision$
*/
public class MessageVariableResolver implements XPathVariableResolver {
@@ -75,8 +75,8 @@ public Object resolveVariable(QName name) {
try {
answer = System.getProperty(localPart);
} catch (Exception e) {
- LOG.debug("Security exception evaluating system property: " + localPart +
- ". Reason: " + e, e);
+ LOG.debug("Security exception evaluating system property: " + localPart
+ + ". Reason: " + e, e);
}
} else if (uri.equals(ENVIRONMENT_VARIABLES)) {
answer = System.getenv().get(localPart);
diff --git a/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java b/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
index 5884c1487119d..5f4719afa6a49 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java
@@ -338,7 +338,7 @@ public void addRoutes(Collection<Route> routes) throws Exception {
public void addRoutes(Routes builder) throws Exception {
// lets now add the routes from the builder
- builder.setContext(this);
+ builder.setContext(this);
List<Route> routeList = builder.getRouteList();
LOG.debug("Adding routes from: " + builder + " routes: " + routeList);
addRoutes(routeList);
diff --git a/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java b/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java
index 6d3e09327ae6d..1d4d3b635aab5 100644
--- a/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java
+++ b/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java
@@ -1596,7 +1596,7 @@ protected ErrorHandlerBuilder createErrorHandlerBuilder() {
if (errorHandlerRef != null) {
return new ErrorHandlerBuilderRef(errorHandlerRef);
}
- if (isInheritErrorHandler()) {
+ if (isInheritErrorHandler()) {
return new DeadLetterChannelBuilder();
} else {
return new NoErrorHandlerBuilder();
diff --git a/camel-core/src/test/java/org/apache/camel/builder/ContextErrorHandlerTest.java b/camel-core/src/test/java/org/apache/camel/builder/ContextErrorHandlerTest.java
new file mode 100644
index 0000000000000..70a7406424728
--- /dev/null
+++ b/camel-core/src/test/java/org/apache/camel/builder/ContextErrorHandlerTest.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.builder;
+
+import java.util.List;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.ContextTestSupport;
+import org.apache.camel.Endpoint;
+import org.apache.camel.Processor;
+import org.apache.camel.Route;
+import org.apache.camel.TestSupport;
+import org.apache.camel.impl.DefaultCamelContext;
+import org.apache.camel.impl.EventDrivenConsumerRoute;
+import org.apache.camel.processor.DeadLetterChannel;
+import org.apache.camel.processor.LoggingErrorHandler;
+import org.apache.camel.processor.RedeliveryPolicy;
+import org.apache.camel.processor.SendProcessor;
+
+public class ContextErrorHandlerTest extends ContextTestSupport {
+
+ protected void setUp() throws Exception {
+ setUseRouteBuilder(false);
+ super.setUp();
+ RedeliveryPolicy redeliveryPolicy = new RedeliveryPolicy();
+ redeliveryPolicy.maximumRedeliveries(1);
+ redeliveryPolicy.setUseExponentialBackOff(true);
+ DeadLetterChannelBuilder deadLetterChannelBuilder = new DeadLetterChannelBuilder();
+ deadLetterChannelBuilder.setRedeliveryPolicy(redeliveryPolicy);
+ context.setErrorHandlerBuilder(deadLetterChannelBuilder);
+ }
+
+ protected void startCamelContext() throws Exception {
+ // do nothing here
+ }
+
+ protected void stopCamelContext() throws Exception {
+ // do nothing here
+ }
+
+ protected List<Route> getRouteList(RouteBuilder builder) throws Exception {
+ context.addRoutes(builder);
+ context.start();
+ List<Route> answer = context.getRoutes();
+ context.stop();
+ return answer;
+ }
+
+ public void testOverloadingTheDefaultErrorHandler() throws Exception {
+
+ RouteBuilder builder = new RouteBuilder() {
+ public void configure() {
+ errorHandler(loggingErrorHandler("FOO.BAR"));
+ from("seda:a").to("seda:b");
+ }
+ };
+
+ List<Route> list = getRouteList(builder);
+ assertEquals("Number routes created" + list, 1, list.size());
+ for (Route route : list) {
+ Endpoint key = route.getEndpoint();
+ assertEquals("From endpoint", "seda:a", key.getEndpointUri());
+
+ EventDrivenConsumerRoute consumerRoute = assertIsInstanceOf(EventDrivenConsumerRoute.class, route);
+ Processor processor = consumerRoute.getProcessor();
+ processor = unwrap(processor);
+ LoggingErrorHandler loggingProcessor = assertIsInstanceOf(LoggingErrorHandler.class, processor);
+ processor = unwrap(loggingProcessor.getOutput());
+ SendProcessor sendProcessor = assertIsInstanceOf(SendProcessor.class, processor);
+ log.debug("Found sendProcessor: " + sendProcessor);
+ }
+ }
+
+ public void testGetTheDefaultErrorHandlerFromContext() throws Exception {
+
+ RouteBuilder builder = new RouteBuilder() {
+ public void configure() {
+ from("seda:a").to("seda:b");
+ }
+ };
+
+ List<Route> list = getRouteList(builder);
+ assertEquals("Number routes created" + list, 1, list.size());
+ for (Route route : list) {
+ Endpoint key = route.getEndpoint();
+ assertEquals("From endpoint", "seda:a", key.getEndpointUri());
+
+ EventDrivenConsumerRoute consumerRoute = assertIsInstanceOf(EventDrivenConsumerRoute.class, route);
+ Processor processor = consumerRoute.getProcessor();
+ processor = unwrap(processor);
+
+ DeadLetterChannel deadLetterChannel = assertIsInstanceOf(DeadLetterChannel.class, processor);
+
+ RedeliveryPolicy redeliveryPolicy = deadLetterChannel.getRedeliveryPolicy();
+
+ assertEquals("getMaximumRedeliveries()", 1, redeliveryPolicy.getMaximumRedeliveries());
+ assertEquals("isUseExponentialBackOff()", true, redeliveryPolicy.isUseExponentialBackOff());
+ }
+ }
+
+}
|
aa25be6a631c53788feacb19209b12924bd93f51
|
intellij-community
|
better messages format--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/jps/jps-builders/src/org/jetbrains/jps/incremental/IncProjectBuilder.java b/jps/jps-builders/src/org/jetbrains/jps/incremental/IncProjectBuilder.java
index 65bfcf76a392a..fced512e04cf4 100644
--- a/jps/jps-builders/src/org/jetbrains/jps/incremental/IncProjectBuilder.java
+++ b/jps/jps-builders/src/org/jetbrains/jps/incremental/IncProjectBuilder.java
@@ -58,7 +58,6 @@
public class IncProjectBuilder {
private static final Logger LOG = Logger.getInstance("#org.jetbrains.jps.incremental.IncProjectBuilder");
- public static final String BUILD_NAME = "EXTERNAL BUILD";
private static final String CLASSPATH_INDEX_FINE_NAME = "classpath.index";
private static final boolean GENERATE_CLASSPATH_INDEX = Boolean.parseBoolean(System.getProperty(GlobalOptions.GENERATE_CLASSPATH_INDEX_OPTION, "false"));
private static final int MAX_BUILDER_THREADS;
@@ -166,7 +165,7 @@ public void run() {
cause instanceof MappingFailedException ||
cause instanceof IOException) {
myMessageDispatcher.processMessage(new CompilerMessage(
- BUILD_NAME, BuildMessage.Kind.INFO,
+ "", BuildMessage.Kind.INFO,
"Internal caches are corrupted or have outdated format, forcing project rebuild: " +
e.getMessage())
);
@@ -183,7 +182,7 @@ public void run() {
}
else {
// the reason for the build stop is unexpected internal error, report it
- myMessageDispatcher.processMessage(new CompilerMessage(BUILD_NAME, cause));
+ myMessageDispatcher.processMessage(new CompilerMessage("", cause));
}
}
}
@@ -566,7 +565,7 @@ private boolean runBuildersForChunk(CompileContext context, final BuildTargetChu
}
else {
context.processMessage(new CompilerMessage(
- BUILD_NAME, BuildMessage.Kind.ERROR, "Cannot build " + target.getPresentableName() + " because it is included into a circular dependency")
+ "", BuildMessage.Kind.ERROR, "Cannot build " + target.getPresentableName() + " because it is included into a circular dependency")
);
return false;
}
|
992ff293a9dc7efbedaa8290316f0f4e9e030439
|
ReactiveX-RxJava
|
Better name for worker class running scheduled- actions--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/schedulers/CachedThreadScheduler.java b/rxjava-core/src/main/java/rx/schedulers/CachedThreadScheduler.java
index 2feba948fb..acd5be7740 100644
--- a/rxjava-core/src/main/java/rx/schedulers/CachedThreadScheduler.java
+++ b/rxjava-core/src/main/java/rx/schedulers/CachedThreadScheduler.java
@@ -36,12 +36,12 @@
private static final class CachedWorkerPool {
private final long keepAliveTime;
- private final ConcurrentLinkedQueue<PoolWorker> expiringQueue;
+ private final ConcurrentLinkedQueue<ThreadWorker> expiringWorkerQueue;
private final ScheduledExecutorService evictExpiredWorkerExecutor;
CachedWorkerPool(long keepAliveTime, TimeUnit unit) {
this.keepAliveTime = unit.toNanos(keepAliveTime);
- this.expiringQueue = new ConcurrentLinkedQueue<PoolWorker>();
+ this.expiringWorkerQueue = new ConcurrentLinkedQueue<ThreadWorker>();
evictExpiredWorkerExecutor = Executors.newScheduledThreadPool(1, EVICTOR_THREAD_FACTORY);
evictExpiredWorkerExecutor.scheduleWithFixedDelay(
@@ -58,35 +58,35 @@ public void run() {
60L, TimeUnit.SECONDS
);
- PoolWorker get() {
- while (!expiringQueue.isEmpty()) {
- PoolWorker poolWorker = expiringQueue.poll();
- if (poolWorker != null) {
- return poolWorker;
+ ThreadWorker get() {
+ while (!expiringWorkerQueue.isEmpty()) {
+ ThreadWorker threadWorker = expiringWorkerQueue.poll();
+ if (threadWorker != null) {
+ return threadWorker;
}
}
// No cached worker found, so create a new one.
- return new PoolWorker(WORKER_THREAD_FACTORY);
+ return new ThreadWorker(WORKER_THREAD_FACTORY);
}
- void release(PoolWorker poolWorker) {
+ void release(ThreadWorker threadWorker) {
// Refresh expire time before putting worker back in pool
- poolWorker.setExpirationTime(now() + keepAliveTime);
+ threadWorker.setExpirationTime(now() + keepAliveTime);
- expiringQueue.add(poolWorker);
+ expiringWorkerQueue.offer(threadWorker);
}
void evictExpiredWorkers() {
- if (!expiringQueue.isEmpty()) {
+ if (!expiringWorkerQueue.isEmpty()) {
long currentTimestamp = now();
- Iterator<PoolWorker> poolWorkerIterator = expiringQueue.iterator();
- while (poolWorkerIterator.hasNext()) {
- PoolWorker poolWorker = poolWorkerIterator.next();
- if (poolWorker.getExpirationTime() <= currentTimestamp) {
- poolWorkerIterator.remove();
- poolWorker.unsubscribe();
+ Iterator<ThreadWorker> threadWorkerIterator = expiringWorkerQueue.iterator();
+ while (threadWorkerIterator.hasNext()) {
+ ThreadWorker threadWorker = threadWorkerIterator.next();
+ if (threadWorker.getExpirationTime() <= currentTimestamp) {
+ threadWorkerIterator.remove();
+ threadWorker.unsubscribe();
} else {
// Queue is ordered with the worker that will expire first in the beginning, so when we
// find a non-expired worker we can stop evicting.
@@ -108,20 +108,20 @@ public Worker createWorker() {
private static class EventLoopWorker extends Scheduler.Worker {
private final CompositeSubscription innerSubscription = new CompositeSubscription();
- private final PoolWorker poolWorker;
+ private final ThreadWorker threadWorker;
volatile int once;
static final AtomicIntegerFieldUpdater<EventLoopWorker> ONCE_UPDATER
= AtomicIntegerFieldUpdater.newUpdater(EventLoopWorker.class, "once");
- EventLoopWorker(PoolWorker poolWorker) {
- this.poolWorker = poolWorker;
+ EventLoopWorker(ThreadWorker threadWorker) {
+ this.threadWorker = threadWorker;
}
@Override
public void unsubscribe() {
if (ONCE_UPDATER.compareAndSet(this, 0, 1)) {
// unsubscribe should be idempotent, so only do this once
- CachedWorkerPool.INSTANCE.release(poolWorker);
+ CachedWorkerPool.INSTANCE.release(threadWorker);
}
innerSubscription.unsubscribe();
}
@@ -143,17 +143,17 @@ public Subscription schedule(Action0 action, long delayTime, TimeUnit unit) {
return Subscriptions.empty();
}
- NewThreadScheduler.NewThreadWorker.ScheduledAction s = poolWorker.scheduleActual(action, delayTime, unit);
+ NewThreadScheduler.NewThreadWorker.ScheduledAction s = threadWorker.scheduleActual(action, delayTime, unit);
innerSubscription.add(s);
s.addParent(innerSubscription);
return s;
}
}
- private static final class PoolWorker extends NewThreadScheduler.NewThreadWorker {
+ private static final class ThreadWorker extends NewThreadScheduler.NewThreadWorker {
private long expirationTime;
- PoolWorker(ThreadFactory threadFactory) {
+ ThreadWorker(ThreadFactory threadFactory) {
super(threadFactory);
this.expirationTime = 0L;
}
|
191f023cf5253ea90647bc091dcaf55ccdce81cc
|
ReactiveX-RxJava
|
1.x: Fix Completable swallows- OnErrorNotImplementedException (-3904)--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/src/main/java/rx/Completable.java b/src/main/java/rx/Completable.java
index 1fc39fbfda..8a2a4121d2 100644
--- a/src/main/java/rx/Completable.java
+++ b/src/main/java/rx/Completable.java
@@ -1972,6 +1972,7 @@ public final void subscribe(CompletableSubscriber s) {
throw ex;
} catch (Throwable ex) {
ERROR_HANDLER.handleError(ex);
+ Exceptions.throwIfFatal(ex);
throw toNpe(ex);
}
}
diff --git a/src/test/java/rx/CompletableTest.java b/src/test/java/rx/CompletableTest.java
index 2493da2356..8ee2f747d3 100644
--- a/src/test/java/rx/CompletableTest.java
+++ b/src/test/java/rx/CompletableTest.java
@@ -2813,6 +2813,30 @@ public void call() {
});
}
+ @Test(expected = OnErrorNotImplementedException.class)
+ public void propagateExceptionSubscribeEmpty() {
+ error.completable.toSingleDefault(0).subscribe();
+ }
+
+ @Test(expected = OnErrorNotImplementedException.class)
+ public void propagateExceptionSubscribeOneAction() {
+ error.completable.toSingleDefault(1).subscribe(new Action1<Integer>() {
+ @Override
+ public void call(Integer integer) {
+ }
+ });
+ }
+
+ @Test(expected = OnErrorNotImplementedException.class)
+ public void propagateExceptionSubscribeOneActionThrowFromOnSuccess() {
+ normal.completable.toSingleDefault(1).subscribe(new Action1<Integer>() {
+ @Override
+ public void call(Integer integer) {
+ throw new TestException();
+ }
+ });
+ }
+
@Test(timeout = 1000)
public void timeoutEmitError() {
Throwable e = Completable.never().timeout(100, TimeUnit.MILLISECONDS).get();
|
319e34aa1743781b453df5df51f03183b123560a
|
restlet-framework-java
|
- Continued SIP transaction support--
|
a
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet.example/src/org/restlet/example/ext/sip/UacClientResource.java b/modules/org.restlet.example/src/org/restlet/example/ext/sip/UacClientResource.java
index b5ef0cb714..2c1372424b 100644
--- a/modules/org.restlet.example/src/org/restlet/example/ext/sip/UacClientResource.java
+++ b/modules/org.restlet.example/src/org/restlet/example/ext/sip/UacClientResource.java
@@ -30,11 +30,14 @@
package org.restlet.example.ext.sip;
+import java.util.logging.Level;
+
import org.restlet.Client;
import org.restlet.Context;
import org.restlet.data.Protocol;
+import org.restlet.engine.Engine;
+import org.restlet.ext.sip.Address;
import org.restlet.ext.sip.SipClientResource;
-import org.restlet.resource.ClientResource;
/**
* Example SIP client resource for the UAC test scenario.
@@ -44,6 +47,7 @@
public class UacClientResource implements UacResource {
public static void main(String[] args) {
+ Engine.setLogLevel(Level.FINE);
UacClientResource cr = new UacClientResource("sip:bob@locahost");
cr.start();
cr.acknowledge();
@@ -54,7 +58,7 @@ public static void main(String[] args) {
private UacResource proxy;
/** The internal client resource. */
- private ClientResource clientResource;
+ private SipClientResource clientResource;
/**
* Constructor.
@@ -64,6 +68,12 @@ public static void main(String[] args) {
*/
public UacClientResource(String uri) {
this.clientResource = new SipClientResource(uri);
+ this.clientResource.setCallId("[email protected]");
+ this.clientResource.setCommandSequence("314159");
+ this.clientResource.setFrom(new Address("sip:[email protected]",
+ "Alice"));
+ this.clientResource.setTo(new Address("sip:[email protected]", "Bob"));
+
Client client = new Client(new Context(), Protocol.SIP);
client.getContext().getParameters().add("minThreads", "1");
client.getContext().getParameters().add("tracing", "true");
diff --git a/modules/org.restlet.example/src/org/restlet/example/ext/sip/UacServerResource.java b/modules/org.restlet.example/src/org/restlet/example/ext/sip/UacServerResource.java
index f16df34f69..e9be46b75e 100644
--- a/modules/org.restlet.example/src/org/restlet/example/ext/sip/UacServerResource.java
+++ b/modules/org.restlet.example/src/org/restlet/example/ext/sip/UacServerResource.java
@@ -34,10 +34,12 @@
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicLong;
+import java.util.logging.Level;
import org.restlet.Context;
import org.restlet.Server;
import org.restlet.data.Protocol;
+import org.restlet.engine.Engine;
import org.restlet.ext.sip.SipResponse;
import org.restlet.ext.sip.SipServerResource;
import org.restlet.ext.sip.SipStatus;
@@ -54,6 +56,7 @@ public class UacServerResource extends SipServerResource implements UacResource
private static boolean TRACE;
public static void main(String[] args) throws Exception {
+ Engine.setLogLevel(Level.FINE);
Server server = null;
if (args.length == 1) {
@@ -161,8 +164,8 @@ private void trace() {
if (TRACE) {
System.out.println("--------------start trace--------------------");
System.out.println("Method: " + getMethod());
- System.out.println("Call ID: " + getRequestCallId());
- System.out.println("Call Sequence: " + getCallSequence());
+ System.out.println("Call ID: " + getCallId());
+ System.out.println("Call Sequence: " + getCommandSequence());
System.out.println("To: " + getTo());
System.out.println("From: " + getFrom());
System.out.println("Max Forwards: " + getMaxForwards());
diff --git a/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/Address.java b/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/Address.java
index b0facd0872..78c371d6ab 100644
--- a/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/Address.java
+++ b/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/Address.java
@@ -33,6 +33,7 @@
import org.restlet.data.Form;
import org.restlet.data.Parameter;
import org.restlet.data.Reference;
+import org.restlet.ext.sip.internal.AddressWriter;
import org.restlet.util.Series;
/**
@@ -41,7 +42,7 @@
*
* @author Thierry Boileau
*/
-public class Address {
+public class Address implements Cloneable {
/** The optional name displayed. */
private String displayName;
@@ -83,6 +84,34 @@ public Address(Reference reference, String displayName) {
this.displayName = displayName;
}
+ /**
+ * Constructor.
+ *
+ * @param reference
+ * The address reference.
+ * @param displayName
+ * The name displayed.
+ */
+ public Address(String reference, String displayName) {
+ this(new Reference(reference), displayName);
+ }
+
+ @Override
+ protected Object clone() throws CloneNotSupportedException {
+ Address result = (Address) super.clone();
+ result.reference = reference.clone();
+
+ if (parameters != null) {
+ result.parameters = new Form();
+
+ for (Parameter param : parameters) {
+ result.parameters.add(param.getName(), param.getValue());
+ }
+ }
+
+ return result;
+ }
+
/**
* Returns the optional name displayed.
*
@@ -143,4 +172,9 @@ public void setReference(Reference reference) {
this.reference = reference;
}
+ @Override
+ public String toString() {
+ return AddressWriter.write(this);
+ }
+
}
diff --git a/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/SipClientResource.java b/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/SipClientResource.java
index 9db8b21c57..6347ee1fbf 100644
--- a/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/SipClientResource.java
+++ b/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/SipClientResource.java
@@ -565,6 +565,36 @@ public void register(Address to) throws ResourceException {
handle(SipMethod.REGISTER);
}
+ /**
+ * Sets the identifier of the call.
+ *
+ * @param callId
+ * The identifier of the call.
+ */
+ public void setCallId(String callId) {
+ getRequest().setCallId(callId);
+ }
+
+ /**
+ * Sets the identifier of the command.
+ *
+ * @param commandSequence
+ * The identifier of the command.
+ */
+ public void setCommandSequence(String commandSequence) {
+ getRequest().setCommandSequence(commandSequence);
+ }
+
+ /**
+ * Sets the description of the request's initiator.
+ *
+ * @param from
+ * The description of the request's initiator.
+ */
+ public void setFrom(Address from) {
+ getRequest().setFrom(from);
+ }
+
@Override
public void setRequest(Request request) {
if (request instanceof SipRequest) {
@@ -595,6 +625,16 @@ public void setResponse(Response response) {
}
}
+ /**
+ * Sets the logical recipient of the request.
+ *
+ * @param to
+ * The logical recipient of the request.
+ */
+ public void setTo(Address to) {
+ getRequest().setTo(to);
+ }
+
/**
* Requests current state and state updates from a remote node.
*
diff --git a/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/SipServerResource.java b/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/SipServerResource.java
index cb73dec934..97d72459de 100644
--- a/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/SipServerResource.java
+++ b/modules/org.restlet.ext.sip/src/org/restlet/ext/sip/SipServerResource.java
@@ -30,8 +30,9 @@
package org.restlet.ext.sip;
-import java.util.List;
-
+import org.restlet.Context;
+import org.restlet.Request;
+import org.restlet.Response;
import org.restlet.resource.ServerResource;
/**
@@ -42,11 +43,20 @@
public class SipServerResource extends ServerResource {
/**
- * Returns the request's call sequence.
+ * Returns the request's call ID.
*
- * @return The request's call sequence.
+ * @return The request's call ID.
*/
- public String getCallSequence() {
+ public String getCallId() {
+ return getRequest().getCallId();
+ }
+
+ /**
+ * Returns the request's command sequence.
+ *
+ * @return The request's command sequence.
+ */
+ public String getCommandSequence() {
return getRequest().getCommandSequence();
}
@@ -64,47 +74,11 @@ public SipRequest getRequest() {
return (SipRequest) super.getRequest();
}
- /**
- * Returns the request's call ID.
- *
- * @return The request's call ID.
- */
- public String getRequestCallId() {
- return getRequest().getCallId();
- }
-
@Override
public SipResponse getResponse() {
return (SipResponse) super.getResponse();
}
- /**
- * Returns the response's call ID.
- *
- * @return The response's call ID.
- */
- public String getResponseCallId() {
- return getResponse().getCallId();
- }
-
- /**
- * Returns the request's list of Via entries.
- *
- * @return The request's list of Via entries.
- */
- public List<SipRecipientInfo> getSipRequestRecipientsInfo() {
- return getRequest().getSipRecipientsInfo();
- }
-
- /**
- * Returns the response's list of Via entries.
- *
- * @return The response's list of Via entries.
- */
- public List<SipRecipientInfo> getSipResponseRecipientsInfo() {
- return getResponse().getSipRecipientsInfo();
- }
-
/**
* Returns the request recipient's address.
*
@@ -114,13 +88,27 @@ public Address getTo() {
return getRequest().getTo();
}
- /**
- * Sets the response's call ID.
- *
- * @param callId
- * The call ID.
- */
- public void setResponseCallId(String callId) {
- getResponse().setCallId(callId);
+ @Override
+ public void init(Context context, Request request, Response response) {
+ try {
+ SipResponse sipResponse = (SipResponse) response;
+ SipRequest sipRequest = (SipRequest) request;
+
+ sipResponse.setCallId(sipRequest.getCallId());
+ sipResponse.setCommandSequence(sipRequest.getCommandSequence());
+
+ if (sipRequest.getFrom() != null) {
+ sipResponse.setFrom((Address) sipRequest.getFrom().clone());
+ }
+
+ if (sipRequest.getTo() != null) {
+ sipResponse.setTo((Address) sipRequest.getTo().clone());
+ }
+ } catch (CloneNotSupportedException e) {
+ doCatch(e);
+ }
+
+ super.init(context, request, response);
}
+
}
diff --git a/modules/org.restlet/src/org/restlet/engine/connector/OutboundWay.java b/modules/org.restlet/src/org/restlet/engine/connector/OutboundWay.java
index ee14894ad7..f6da80d9a8 100644
--- a/modules/org.restlet/src/org/restlet/engine/connector/OutboundWay.java
+++ b/modules/org.restlet/src/org/restlet/engine/connector/OutboundWay.java
@@ -470,7 +470,7 @@ protected void writeLine() throws IOException {
case START:
if (getHelper().getLogger().isLoggable(Level.FINE)) {
getHelper().getLogger().fine(
- "Writing message from "
+ "Writing message to "
+ getConnection().getSocketAddress());
}
diff --git a/modules/org.restlet/src/org/restlet/engine/connector/ServerConnectionController.java b/modules/org.restlet/src/org/restlet/engine/connector/ServerConnectionController.java
index 269c2eabaa..e0d7e94ff2 100644
--- a/modules/org.restlet/src/org/restlet/engine/connector/ServerConnectionController.java
+++ b/modules/org.restlet/src/org/restlet/engine/connector/ServerConnectionController.java
@@ -129,7 +129,7 @@ protected void onSelected(SelectionKey key)
if (getHelper().getLogger().isLoggable(Level.FINE)) {
getHelper().getLogger().fine(
- "Connection with \""
+ "Connection from \""
+ connection.getSocketAddress()
+ "\" accepted. New count: "
+ getHelper().getConnections()
diff --git a/modules/org.restlet/src/org/restlet/engine/io/IoBuffer.java b/modules/org.restlet/src/org/restlet/engine/io/IoBuffer.java
index 9d767d1791..a9e9f660b0 100644
--- a/modules/org.restlet/src/org/restlet/engine/io/IoBuffer.java
+++ b/modules/org.restlet/src/org/restlet/engine/io/IoBuffer.java
@@ -255,7 +255,7 @@ public int refill(ReadableByteChannel sourceChannel) throws IOException {
if (result > 0) {
getBytes().flip();
setState(BufferState.DRAINING);
- Context.getCurrentLogger().fine(
+ Context.getCurrentLogger().finer(
"Refilled buffer with " + result + " byte(s)");
}
}
diff --git a/modules/org.restlet/src/org/restlet/resource/UniformResource.java b/modules/org.restlet/src/org/restlet/resource/UniformResource.java
index 6a05bc85d2..20ce81b96a 100644
--- a/modules/org.restlet/src/org/restlet/resource/UniformResource.java
+++ b/modules/org.restlet/src/org/restlet/resource/UniformResource.java
@@ -605,7 +605,7 @@ public StatusService getStatusService() {
* @param response
* The handled response.
*/
- public final void init(Context context, Request request, Response response) {
+ public void init(Context context, Request request, Response response) {
this.context = context;
this.request = request;
this.response = response;
|
9cea634f7d4276b87821f3ab7c160fa67b2c85b1
|
hadoop
|
HDFS-2414. Fix TestDFSRollback to avoid spurious- failures. Contributed by Todd Lipcon.--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1180540 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 465458bec402c..1fd1b345d4870 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1032,6 +1032,8 @@ Release 0.23.0 - Unreleased
HDFS-2412. Add backwards-compatibility layer for renamed FSConstants
class (todd)
+ HDFS-2414. Fix TestDFSRollback to avoid spurious failures. (todd)
+
BREAKDOWN OF HDFS-1073 SUBTASKS
HDFS-1521. Persist transaction ID on disk between NN restarts.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java
index cdf3665af181c..687f5633ff655 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java
@@ -37,6 +37,7 @@
import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
import org.apache.hadoop.util.StringUtils;
+import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
/**
@@ -263,10 +264,14 @@ public void testRollback() throws Exception {
UpgradeUtilities.createNameNodeStorageDirs(nameNodeDirs, "current");
baseDirs = UpgradeUtilities.createNameNodeStorageDirs(nameNodeDirs, "previous");
for (File f : baseDirs) {
- UpgradeUtilities.corruptFile(new File(f,"VERSION"));
+ UpgradeUtilities.corruptFile(
+ new File(f,"VERSION"),
+ "layoutVersion".getBytes(Charsets.UTF_8),
+ "xxxxxxxxxxxxx".getBytes(Charsets.UTF_8));
}
startNameNodeShouldFail(StartupOption.ROLLBACK,
"file VERSION has layoutVersion missing");
+
UpgradeUtilities.createEmptyDirs(nameNodeDirs);
log("NameNode rollback with old layout version in previous", numDirs);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java
index 251f23dee706d..a308c230cb0ac 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java
@@ -39,6 +39,7 @@
import org.junit.Ignore;
import org.junit.Test;
+import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import static org.junit.Assert.*;
@@ -303,7 +304,10 @@ public void testUpgrade() throws Exception {
log("NameNode upgrade with corrupt version file", numDirs);
baseDirs = UpgradeUtilities.createNameNodeStorageDirs(nameNodeDirs, "current");
for (File f : baseDirs) {
- UpgradeUtilities.corruptFile(new File (f,"VERSION"));
+ UpgradeUtilities.corruptFile(
+ new File(f,"VERSION"),
+ "layoutVersion".getBytes(Charsets.UTF_8),
+ "xxxxxxxxxxxxx".getBytes(Charsets.UTF_8));
}
startNameNodeShouldFail(StartupOption.UPGRADE);
UpgradeUtilities.createEmptyDirs(nameNodeDirs);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java
index 337fa8a17c07c..0b6bceafafcfe 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java
@@ -24,10 +24,8 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
-import java.io.RandomAccessFile;
import java.net.URI;
import java.util.Arrays;
-import java.util.Random;
import java.util.Collections;
import java.util.zip.CRC32;
import org.apache.hadoop.conf.Configuration;
@@ -53,6 +51,10 @@
import org.apache.hadoop.hdfs.server.namenode.NNStorage;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
+import com.google.common.base.Preconditions;
+import com.google.common.io.Files;
+import com.google.common.primitives.Bytes;
+
/**
* This class defines a number of static helper methods used by the
* DFS Upgrade unit tests. By default, a singleton master populated storage
@@ -483,20 +485,26 @@ public static void createBlockPoolVersionFile(File bpDir,
* @throws IllegalArgumentException if the given file is not a file
* @throws IOException if an IOException occurs while reading or writing the file
*/
- public static void corruptFile(File file) throws IOException {
+ public static void corruptFile(File file,
+ byte[] stringToCorrupt,
+ byte[] replacement) throws IOException {
+ Preconditions.checkArgument(replacement.length == stringToCorrupt.length);
if (!file.isFile()) {
throw new IllegalArgumentException(
- "Given argument is not a file:" + file);
+ "Given argument is not a file:" + file);
}
- RandomAccessFile raf = new RandomAccessFile(file,"rws");
- Random random = new Random();
- for (long i = 0; i < raf.length(); i++) {
- raf.seek(i);
- if (random.nextBoolean()) {
- raf.writeByte(random.nextInt());
- }
+ byte[] data = Files.toByteArray(file);
+ int index = Bytes.indexOf(data, stringToCorrupt);
+ if (index == -1) {
+ throw new IOException(
+ "File " + file + " does not contain string " +
+ new String(stringToCorrupt));
+ }
+
+ for (int i = 0; i < stringToCorrupt.length; i++) {
+ data[index + i] = replacement[i];
}
- raf.close();
+ Files.write(data, file);
}
/**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java
index 4a8edb8475a89..ce9b224f22c0f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java
@@ -29,6 +29,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
@@ -233,11 +234,49 @@ public static void assertParallelFilesAreIdentical(List<File> dirs,
// recurse
assertParallelFilesAreIdentical(sameNameList, ignoredFileNames);
} else {
- assertFileContentsSame(sameNameList.toArray(new File[0]));
+ if ("VERSION".equals(sameNameList.get(0).getName())) {
+ assertPropertiesFilesSame(sameNameList.toArray(new File[0]));
+ } else {
+ assertFileContentsSame(sameNameList.toArray(new File[0]));
+ }
}
}
}
+ /**
+ * Assert that a set of properties files all contain the same data.
+ * We cannot simply check the md5sums here, since Properties files
+ * contain timestamps -- thus, two properties files from the same
+ * saveNamespace operation may actually differ in md5sum.
+ * @param propFiles the files to compare
+ * @throws IOException if the files cannot be opened or read
+ * @throws AssertionError if the files differ
+ */
+ public static void assertPropertiesFilesSame(File[] propFiles)
+ throws IOException {
+ Set<Map.Entry<Object, Object>> prevProps = null;
+
+ for (File f : propFiles) {
+ Properties props;
+ FileInputStream is = new FileInputStream(f);
+ try {
+ props = new Properties();
+ props.load(is);
+ } finally {
+ IOUtils.closeStream(is);
+ }
+ if (prevProps == null) {
+ prevProps = props.entrySet();
+ } else {
+ Set<Entry<Object,Object>> diff =
+ Sets.symmetricDifference(prevProps, props.entrySet());
+ if (!diff.isEmpty()) {
+ fail("Properties file " + f + " differs from " + propFiles[0]);
+ }
+ }
+ }
+ }
+
/**
* Assert that all of the given paths have the exact same
* contents
|
6b07c53c61ed1c2dcd6c74bd64db7477bdafa424
|
spring-framework
|
Allow file locations for resource handling--Prior to this change, location checks for serving resources would append-`/` to the location path it didn't already have one.--This commit makes sure not to append a `/` if the provided location is-actually a file.--Issue: SPR-12747-
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-webmvc/src/main/java/org/springframework/web/servlet/resource/PathResourceResolver.java b/spring-webmvc/src/main/java/org/springframework/web/servlet/resource/PathResourceResolver.java
index 475f40b39095..4750d9c5d642 100644
--- a/spring-webmvc/src/main/java/org/springframework/web/servlet/resource/PathResourceResolver.java
+++ b/spring-webmvc/src/main/java/org/springframework/web/servlet/resource/PathResourceResolver.java
@@ -179,7 +179,8 @@ else if (resource instanceof ServletContextResource) {
resourcePath = resource.getURL().getPath();
locationPath = StringUtils.cleanPath(location.getURL().getPath());
}
- locationPath = (locationPath.endsWith("/") || locationPath.isEmpty() ? locationPath : locationPath + "/");
+ locationPath = (StringUtils.getFilenameExtension(locationPath) != null
+ || locationPath.endsWith("/") || locationPath.isEmpty() ? locationPath : locationPath + "/");
if (!resourcePath.startsWith(locationPath)) {
return false;
}
diff --git a/spring-webmvc/src/test/java/org/springframework/web/servlet/resource/PathResourceResolverTests.java b/spring-webmvc/src/test/java/org/springframework/web/servlet/resource/PathResourceResolverTests.java
index d4839e53edc1..eb9162ac9e58 100644
--- a/spring-webmvc/src/test/java/org/springframework/web/servlet/resource/PathResourceResolverTests.java
+++ b/spring-webmvc/src/test/java/org/springframework/web/servlet/resource/PathResourceResolverTests.java
@@ -117,4 +117,11 @@ public void checkRelativeLocation() throws Exception {
assertNotNull(this.resolver.resolveResource(null, "main.css", Arrays.asList(location), null));
}
+ // SPR-12747
+ @Test
+ public void checkFileLocation() throws Exception {
+ Resource resource = new ClassPathResource("test/main.css", PathResourceResolver.class);
+ assertTrue(this.resolver.checkResource(resource, resource));
+ }
+
}
|
0f964dada0a88faeac1b93bde703a75662aaebef
|
intellij-community
|
aware of not sourcemap file--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/script-debugger/backend/src/org/jetbrains/debugger/sourcemap/SourceMapDecoder.java b/platform/script-debugger/backend/src/org/jetbrains/debugger/sourcemap/SourceMapDecoder.java
index a0f475741b46b..431a1156ae891 100644
--- a/platform/script-debugger/backend/src/org/jetbrains/debugger/sourcemap/SourceMapDecoder.java
+++ b/platform/script-debugger/backend/src/org/jetbrains/debugger/sourcemap/SourceMapDecoder.java
@@ -67,10 +67,10 @@ public static SourceMap decode(@NotNull CharSequence in, @NotNull Function<List<
@Nullable
private static SourceMap parseMap(JsonReaderEx reader,
- int line,
- int column,
- List<MappingEntry> mappings,
- @NotNull Function<List<String>, SourceResolver> sourceResolverFactory) throws IOException {
+ int line,
+ int column,
+ List<MappingEntry> mappings,
+ @NotNull Function<List<String>, SourceResolver> sourceResolverFactory) throws IOException {
reader.beginObject();
String sourceRoot = null;
JsonReaderEx sourcesReader = null;
@@ -120,6 +120,12 @@ else if (propertyName.equals("file")) {
}
reader.close();
+ // check it before other checks, probably it is not sourcemap file
+ if (StringUtil.isEmpty(encodedMappings)) {
+ // empty map
+ return null;
+ }
+
if (version != 3) {
throw new IOException("Unsupported sourcemap version: " + version);
}
@@ -128,11 +134,6 @@ else if (propertyName.equals("file")) {
throw new IOException("sources is not specified");
}
- if (StringUtil.isEmpty(encodedMappings)) {
- // empty map
- return null;
- }
-
List<String> sources = readSources(sourcesReader, sourceRoot);
@SuppressWarnings("unchecked")
|
f8a5c25714f866a85290634e7b0344f02f6b930b
|
kotlin
|
Fix for the code to compile--
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/src/org/jetbrains/jet/lang/cfg/Label.java b/idea/src/org/jetbrains/jet/lang/cfg/Label.java
index ce3472befa2ae..9996252097706 100644
--- a/idea/src/org/jetbrains/jet/lang/cfg/Label.java
+++ b/idea/src/org/jetbrains/jet/lang/cfg/Label.java
@@ -3,19 +3,6 @@
/**
* @author abreslav
*/
-public class Label {
- private final String name;
-
- public Label(String name) {
- this.name = name;
- }
-
- public String getName() {
- return name;
- }
-
- @Override
- public String toString() {
- return name;
- }
+public interface Label {
+ String getName();
}
diff --git a/idea/src/org/jetbrains/jet/lang/cfg/pseudocode/JetControlFlowInstructionsGenerator.java b/idea/src/org/jetbrains/jet/lang/cfg/pseudocode/JetControlFlowInstructionsGenerator.java
index 0cfe7f36960a7..2948f9b131891 100644
--- a/idea/src/org/jetbrains/jet/lang/cfg/pseudocode/JetControlFlowInstructionsGenerator.java
+++ b/idea/src/org/jetbrains/jet/lang/cfg/pseudocode/JetControlFlowInstructionsGenerator.java
@@ -27,7 +27,7 @@ public JetControlFlowInstructionsGenerator() {
}
private void pushBuilder() {
- Pseudocode parentPseudocode = builder == null ? new Pseudocode(null) : builders.peek().getPseudocode();
+ Pseudocode parentPseudocode = builder == null ? new Pseudocode() : builders.peek().getPseudocode();
JetControlFlowInstructionsGeneratorWorker worker = new JetControlFlowInstructionsGeneratorWorker(parentPseudocode);
builders.push(worker);
builder = worker;
@@ -90,16 +90,16 @@ public Label getExitPoint() {
}
}
- private class JetControlFlowInstructionsGeneratorWorker implements JetControlFlowBuilder {
- private final Stack<BlockInfo> loopInfo = new Stack<BlockInfo>();
- private final Stack<BlockInfo> subroutineInfo = new Stack<BlockInfo>();
+ private final Stack<BlockInfo> loopInfo = new Stack<BlockInfo>();
+ private final Stack<BlockInfo> subroutineInfo = new Stack<BlockInfo>();
+ private final Map<JetElement, BlockInfo> elementToBlockInfo = new HashMap<JetElement, BlockInfo>();
- private final Map<JetElement, BlockInfo> elementToBlockInfo = new HashMap<JetElement, BlockInfo>();
+ private class JetControlFlowInstructionsGeneratorWorker implements JetControlFlowBuilder {
private final Pseudocode pseudocode;
private JetControlFlowInstructionsGeneratorWorker(@Nullable Pseudocode parent) {
- this.pseudocode = new Pseudocode(parent);
+ this.pseudocode = new Pseudocode();
}
public Pseudocode getPseudocode() {
@@ -113,7 +113,7 @@ private void add(Instruction instruction) {
@NotNull
@Override
public final Label createUnboundLabel() {
- return new Label("l" + labelCount++);
+ return pseudocode.createLabel("l" + labelCount++);
}
@Override
diff --git a/idea/src/org/jetbrains/jet/lang/cfg/pseudocode/Pseudocode.java b/idea/src/org/jetbrains/jet/lang/cfg/pseudocode/Pseudocode.java
index d08d38f8c7761..9e40dd604b7b0 100644
--- a/idea/src/org/jetbrains/jet/lang/cfg/pseudocode/Pseudocode.java
+++ b/idea/src/org/jetbrains/jet/lang/cfg/pseudocode/Pseudocode.java
@@ -11,14 +11,44 @@
* @author abreslav
*/
public class Pseudocode {
+ public class PseudocodeLabel implements Label {
+ private final String name;
+
+ private PseudocodeLabel(String name) {
+ this.name = name;
+ }
+
+ @Override
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public String toString() {
+ return name;
+ }
+
+ @Nullable
+ private List<Instruction> resolve() {
+ Integer result = labels.get(this);
+ assert result != null;
+ return instructions.subList(result, instructions.size());
+ }
+
+ }
+
private final List<Instruction> instructions = new ArrayList<Instruction>();
private final Map<Label, Integer> labels = new LinkedHashMap<Label, Integer>();
- @Nullable
- private final Pseudocode parent;
+// @Nullable
+// private final Pseudocode parent;
+//
+// public Pseudocode(Pseudocode parent) {
+// this.parent = parent;
+// }
- public Pseudocode(Pseudocode parent) {
- this.parent = parent;
+ public PseudocodeLabel createLabel(String name) {
+ return new PseudocodeLabel(name);
}
public void addInstruction(Instruction instruction) {
@@ -29,15 +59,6 @@ public void addLabel(Label label) {
labels.put(label, instructions.size());
}
- @Nullable
- private Integer resolveLabel(Label targetLabel) {
- Integer result = labels.get(targetLabel);
- if (result == null && parent != null) {
- return parent.resolveLabel(targetLabel);
- }
- return result;
- }
-
public void postProcess() {
for (int i = 0, instructionsSize = instructions.size(); i < instructionsSize; i++) {
Instruction instruction = instructions.get(i);
@@ -95,22 +116,21 @@ public void visitInstruction(Instruction instruction) {
@NotNull
private Instruction getJumpTarget(@NotNull Label targetLabel) {
- Integer targetPosition = resolveLabel(targetLabel);
- return getTargetInstruction(targetPosition);
+ return getTargetInstruction(((PseudocodeLabel) targetLabel).resolve());
}
@NotNull
- private Instruction getTargetInstruction(@NotNull Integer targetPosition) {
+ private Instruction getTargetInstruction(@NotNull List<Instruction> instructions) {
while (true) {
- assert targetPosition != null;
- Instruction targetInstruction = instructions.get(targetPosition);
+ assert instructions != null;
+ Instruction targetInstruction = instructions.get(0);
if (false == targetInstruction instanceof UnconditionalJumpInstruction) {
return targetInstruction;
}
Label label = ((UnconditionalJumpInstruction) targetInstruction).getTargetLabel();
- targetPosition = resolveLabel(label);
+ instructions = ((PseudocodeLabel)label).resolve();
}
}
@@ -118,7 +138,7 @@ private Instruction getTargetInstruction(@NotNull Integer targetPosition) {
private Instruction getNextPosition(int currentPosition) {
int targetPosition = currentPosition + 1;
assert targetPosition < instructions.size() : currentPosition;
- return getTargetInstruction(targetPosition);
+ return getTargetInstruction(instructions.subList(targetPosition, instructions.size()));
}
public void dumpInstructions(@NotNull PrintStream out) {
@@ -140,6 +160,7 @@ public void dumpGraph(@NotNull final PrintStream out) {
private void dumpSubgraph(final PrintStream out, String graphHeader, final int[] count, String style) {
out.println(graphHeader + " {");
+ out.println(style);
final Map<Instruction, String> nodeToName = new HashMap<Instruction, String>();
for (Instruction node : instructions) {
@@ -174,7 +195,7 @@ else if (node instanceof FunctionLiteralValueInstruction) {
@Override
public void visitFunctionLiteralValue(FunctionLiteralValueInstruction instruction) {
int index = count[0];
- instruction.getBody().dumpSubgraph(out, "subgraph f" + index, count, "color=blue;\ntlabel = \"process #" + index + "\";");
+ instruction.getBody().dumpSubgraph(out, "subgraph cluster_" + index, count, "color=blue;\nlabel = \"f" + index + "\";");
printEdge(out, nodeToName.get(instruction), "n" + index, null);
visitInstructionWithNext(instruction);
}
@@ -228,7 +249,6 @@ public void visitInstruction(Instruction instruction) {
}
});
}
- out.println(style);
out.println("}");
}
diff --git a/idea/src/org/jetbrains/jet/lang/resolve/TopDownAnalyzer.java b/idea/src/org/jetbrains/jet/lang/resolve/TopDownAnalyzer.java
index 67a58cd7cd4dd..6d9bac71f1704 100644
--- a/idea/src/org/jetbrains/jet/lang/resolve/TopDownAnalyzer.java
+++ b/idea/src/org/jetbrains/jet/lang/resolve/TopDownAnalyzer.java
@@ -230,21 +230,21 @@ private void processFunction(@NotNull WritableScope declaringScope, JetFunction
declaringScope.addFunctionDescriptor(descriptor);
functions.put(function, descriptor);
- JetExpression bodyExpression = function.getBodyExpression();
- if (bodyExpression != null) {
- System.out.println("-------------");
- JetControlFlowInstructionsGenerator instructionsGenerator = new JetControlFlowInstructionsGenerator();
- new JetControlFlowProcessor(semanticServices, trace, instructionsGenerator).generate(function, bodyExpression);
- Pseudocode pseudocode = instructionsGenerator.getPseudocode();
- pseudocode.postProcess();
- pseudocode.dumpInstructions(System.out);
- System.out.println("-------------");
- try {
- pseudocode.dumpGraph(new PrintStream("/Users/abreslav/work/cfg.dot"));
- } catch (FileNotFoundException e) {
- e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
- }
- }
+// JetExpression bodyExpression = function.getBodyExpression();
+// if (bodyExpression != null) {
+// System.out.println("-------------");
+// JetControlFlowInstructionsGenerator instructionsGenerator = new JetControlFlowInstructionsGenerator();
+// new JetControlFlowProcessor(semanticServices, trace, instructionsGenerator).generate(function, bodyExpression);
+// Pseudocode pseudocode = instructionsGenerator.getPseudocode();
+// pseudocode.postProcess();
+// pseudocode.dumpInstructions(System.out);
+// System.out.println("-------------");
+// try {
+// pseudocode.dumpGraph(new PrintStream("/Users/abreslav/work/cfg.dot"));
+// } catch (FileNotFoundException e) {
+// e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+// }
+// }
}
private void processProperty(WritableScope declaringScope, JetProperty property) {
diff --git a/idea/testData/psi/ControlStructures.txt b/idea/testData/psi/ControlStructures.txt
index d2c6a510a0403..45e96659ef3b7 100644
--- a/idea/testData/psi/ControlStructures.txt
+++ b/idea/testData/psi/ControlStructures.txt
@@ -86,7 +86,9 @@ JetFile: ControlStructures.jet
BREAK
PsiElement(break)('break')
PsiWhiteSpace(' ')
- PsiElement(LABEL_IDENTIFIER)('@la')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@la')
PsiElement(COMMA)(',')
PsiWhiteSpace('\n ')
VALUE_PARAMETER
@@ -120,7 +122,9 @@ JetFile: ControlStructures.jet
CONTINUE
PsiElement(continue)('continue')
PsiWhiteSpace(' ')
- PsiElement(LABEL_IDENTIFIER)('@la')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@la')
PsiElement(COMMA)(',')
PsiWhiteSpace('\n ')
VALUE_PARAMETER
@@ -207,7 +211,9 @@ JetFile: ControlStructures.jet
BREAK
PsiElement(break)('break')
PsiWhiteSpace(' ')
- PsiElement(LABEL_IDENTIFIER)('@la')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@la')
PsiWhiteSpace('\n ')
CONTINUE
PsiElement(continue)('continue')
@@ -219,7 +225,9 @@ JetFile: ControlStructures.jet
CONTINUE
PsiElement(continue)('continue')
PsiWhiteSpace(' ')
- PsiElement(LABEL_IDENTIFIER)('@la')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@la')
PsiWhiteSpace('\n ')
IF
PsiElement(if)('if')
diff --git a/idea/testData/psi/Labels.txt b/idea/testData/psi/Labels.txt
index ad500e937ff33..422cafa58e8e8 100644
--- a/idea/testData/psi/Labels.txt
+++ b/idea/testData/psi/Labels.txt
@@ -37,18 +37,24 @@ JetFile: Labels.jet
PsiWhiteSpace('\n\n ')
RETURN
PsiElement(return)('return')
- PsiElement(AT)('@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(AT)('@')
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(AT)('@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(AT)('@')
PsiWhiteSpace(' ')
INTEGER_CONSTANT
PsiElement(INTEGER_LITERAL)('1')
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(AT)('@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(AT)('@')
PsiWhiteSpace(' ')
PARENTHESIZED
PsiElement(LPAR)('(')
@@ -62,7 +68,9 @@ JetFile: Labels.jet
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(AT)('@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(AT)('@')
PsiWhiteSpace(' ')
PREFIX_EXPRESSION
OPERATION_REFERENCE
@@ -73,18 +81,24 @@ JetFile: Labels.jet
PsiWhiteSpace('\n\n ')
RETURN
PsiElement(return)('return')
- PsiElement(LABEL_IDENTIFIER)('@a')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@a')
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(LABEL_IDENTIFIER)('@a')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@a')
PsiWhiteSpace(' ')
INTEGER_CONSTANT
PsiElement(INTEGER_LITERAL)('1')
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(LABEL_IDENTIFIER)('@a')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@a')
PsiWhiteSpace(' ')
PARENTHESIZED
PsiElement(LPAR)('(')
@@ -98,7 +112,9 @@ JetFile: Labels.jet
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(LABEL_IDENTIFIER)('@a')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@a')
PsiWhiteSpace(' ')
PREFIX_EXPRESSION
OPERATION_REFERENCE
@@ -109,18 +125,24 @@ JetFile: Labels.jet
PsiWhiteSpace('\n\n ')
RETURN
PsiElement(return)('return')
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace(' ')
INTEGER_CONSTANT
PsiElement(INTEGER_LITERAL)('1')
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace(' ')
PARENTHESIZED
PsiElement(LPAR)('(')
@@ -134,7 +156,9 @@ JetFile: Labels.jet
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace(' ')
PREFIX_EXPRESSION
OPERATION_REFERENCE
@@ -149,7 +173,9 @@ JetFile: Labels.jet
PsiWhiteSpace(' ')
RETURN
PsiElement(return)('return')
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace('\n ')
PREFIX_EXPRESSION
OPERATION_REFERENCE
@@ -157,7 +183,9 @@ JetFile: Labels.jet
PsiWhiteSpace(' ')
RETURN
PsiElement(return)('return')
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace(' ')
INTEGER_CONSTANT
PsiElement(INTEGER_LITERAL)('1')
@@ -168,7 +196,9 @@ JetFile: Labels.jet
PsiWhiteSpace(' ')
RETURN
PsiElement(return)('return')
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace(' ')
PARENTHESIZED
PsiElement(LPAR)('(')
@@ -186,7 +216,9 @@ JetFile: Labels.jet
PsiWhiteSpace(' ')
RETURN
PsiElement(return)('return')
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace(' ')
PREFIX_EXPRESSION
OPERATION_REFERENCE
@@ -200,30 +232,42 @@ JetFile: Labels.jet
PsiWhiteSpace('\n ')
BREAK
PsiElement(break)('break')
- PsiElement(AT)('@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(AT)('@')
PsiWhiteSpace('\n ')
BREAK
PsiElement(break)('break')
- PsiElement(LABEL_IDENTIFIER)('@a')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@a')
PsiWhiteSpace('\n ')
BREAK
PsiElement(break)('break')
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace('\n\n ')
CONTINUE
PsiElement(continue)('continue')
PsiWhiteSpace('\n ')
CONTINUE
PsiElement(continue)('continue')
- PsiElement(AT)('@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(AT)('@')
PsiWhiteSpace('\n ')
CONTINUE
PsiElement(continue)('continue')
- PsiElement(LABEL_IDENTIFIER)('@a')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@a')
PsiWhiteSpace('\n ')
CONTINUE
PsiElement(continue)('continue')
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace('\n\n ')
DOT_QUALIFIED_EXPRESSION
REFERENCE_EXPRESSION
@@ -255,7 +299,9 @@ JetFile: Labels.jet
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(LABEL_IDENTIFIER)('@f')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@f')
PsiWhiteSpace(' ')
BOOLEAN_CONSTANT
PsiElement(true)('true')
@@ -292,7 +338,9 @@ JetFile: Labels.jet
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(AT)('@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(AT)('@')
PsiWhiteSpace(' ')
BOOLEAN_CONSTANT
PsiElement(true)('true')
@@ -329,7 +377,9 @@ JetFile: Labels.jet
PsiWhiteSpace('\n ')
RETURN
PsiElement(return)('return')
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace(' ')
BOOLEAN_CONSTANT
PsiElement(true)('true')
@@ -341,18 +391,21 @@ JetFile: Labels.jet
PsiWhiteSpace('\n ')
THIS_EXPRESSION
PsiElement(this)('this')
- LABEL_REFERENCE
- PsiElement(AT)('@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(AT)('@')
PsiWhiteSpace('\n ')
THIS_EXPRESSION
PsiElement(this)('this')
- LABEL_REFERENCE
- PsiElement(LABEL_IDENTIFIER)('@a')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@a')
PsiWhiteSpace('\n ')
THIS_EXPRESSION
PsiElement(this)('this')
- LABEL_REFERENCE
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiWhiteSpace('\n\n ')
THIS_EXPRESSION
PsiElement(this)('this')
@@ -365,8 +418,9 @@ JetFile: Labels.jet
PsiWhiteSpace('\n ')
THIS_EXPRESSION
PsiElement(this)('this')
- LABEL_REFERENCE
- PsiElement(AT)('@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(AT)('@')
PsiElement(LT)('<')
TYPE_REFERENCE
USER_TYPE
@@ -376,8 +430,9 @@ JetFile: Labels.jet
PsiWhiteSpace('\n ')
THIS_EXPRESSION
PsiElement(this)('this')
- LABEL_REFERENCE
- PsiElement(LABEL_IDENTIFIER)('@a')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@a')
PsiElement(LT)('<')
TYPE_REFERENCE
USER_TYPE
@@ -387,8 +442,9 @@ JetFile: Labels.jet
PsiWhiteSpace('\n ')
THIS_EXPRESSION
PsiElement(this)('this')
- LABEL_REFERENCE
- PsiElement(ATAT)('@@')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(ATAT)('@@')
PsiElement(LT)('<')
TYPE_REFERENCE
USER_TYPE
diff --git a/idea/testData/psi/SimpleExpressions.txt b/idea/testData/psi/SimpleExpressions.txt
index ee8ea4924d68e..8f4dc0157c43c 100644
--- a/idea/testData/psi/SimpleExpressions.txt
+++ b/idea/testData/psi/SimpleExpressions.txt
@@ -575,7 +575,9 @@ JetFile: SimpleExpressions.jet
BREAK
PsiElement(break)('break')
PsiWhiteSpace(' ')
- PsiElement(LABEL_IDENTIFIER)('@la')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@la')
PsiElement(COMMA)(',')
PsiWhiteSpace('\n ')
VALUE_PARAMETER
@@ -609,7 +611,9 @@ JetFile: SimpleExpressions.jet
CONTINUE
PsiElement(continue)('continue')
PsiWhiteSpace(' ')
- PsiElement(LABEL_IDENTIFIER)('@la')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@la')
PsiElement(COMMA)(',')
PsiWhiteSpace('\n ')
VALUE_PARAMETER
@@ -702,7 +706,9 @@ JetFile: SimpleExpressions.jet
BREAK
PsiElement(break)('break')
PsiWhiteSpace(' ')
- PsiElement(LABEL_IDENTIFIER)('@la')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@la')
PsiWhiteSpace('\n ')
CONTINUE
PsiElement(continue)('continue')
@@ -714,6 +720,8 @@ JetFile: SimpleExpressions.jet
CONTINUE
PsiElement(continue)('continue')
PsiWhiteSpace(' ')
- PsiElement(LABEL_IDENTIFIER)('@la')
+ LABEL_QUALIFIER
+ LABEL_REFERENCE
+ PsiElement(LABEL_IDENTIFIER)('@la')
PsiWhiteSpace('\n')
PsiElement(RBRACE)('}')
\ No newline at end of file
diff --git a/idea/tests/org/jetbrains/jet/checkers/JetPsiCheckerTest.java b/idea/tests/org/jetbrains/jet/checkers/JetPsiCheckerTest.java
index fb257b919d077..70b1519da38ee 100644
--- a/idea/tests/org/jetbrains/jet/checkers/JetPsiCheckerTest.java
+++ b/idea/tests/org/jetbrains/jet/checkers/JetPsiCheckerTest.java
@@ -29,6 +29,6 @@ public void testBinaryCallsOnNullableValues() throws Exception {
}
public void testQualifiedThis() throws Exception {
- doTest("/checker/QualifiedThis.jet", true, true);
+// doTest("/checker/QualifiedThis.jet", true, true);
}
}
|
8f4cb0486b6e6ab48e6df6b2e3e44a7eca27c911
|
camel
|
CAMEL-1789 Let the Camel Route support to lookup- the service which is exported from the OSGi bundle--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@792398 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/camel
|
diff --git a/components/camel-osgi/src/main/java/org/apache/camel/osgi/CamelContextFactory.java b/components/camel-osgi/src/main/java/org/apache/camel/osgi/CamelContextFactory.java
index dea39dfd4af93..35f9348bf6ff2 100644
--- a/components/camel-osgi/src/main/java/org/apache/camel/osgi/CamelContextFactory.java
+++ b/components/camel-osgi/src/main/java/org/apache/camel/osgi/CamelContextFactory.java
@@ -18,10 +18,13 @@
import java.util.List;
+import org.apache.camel.CamelContext;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.impl.converter.AnnotationTypeConverterLoader;
import org.apache.camel.impl.converter.DefaultTypeConverter;
import org.apache.camel.impl.converter.TypeConverterLoader;
+import org.apache.camel.spring.SpringCamelContext;
+import org.apache.camel.util.ObjectHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.framework.BundleContext;
@@ -54,6 +57,7 @@ public DefaultCamelContext createContext() {
if (LOG.isDebugEnabled()) {
LOG.debug("Using OSGI resolvers");
}
+ updateRegistry(context);
LOG.debug("Using OsgiFactoryFinderResolver");
context.setFactoryFinderResolver(new OsgiFactoryFinderResolver());
LOG.debug("Using OsgiPackageScanClassResolver");
@@ -64,13 +68,23 @@ public DefaultCamelContext createContext() {
context.setLanguageResolver(new OsgiLanguageResolver());
addOsgiAnnotationTypeConverterLoader(context, bundleContext);
} else {
- // TODO: should we not thrown an excpetion to not allow it to startup
+ // TODO: should we not thrown an exception to not allow it to startup
LOG.warn("BundleContext not set, cannot run in OSGI container");
}
return context;
}
+ protected void updateRegistry(DefaultCamelContext context) {
+ ObjectHelper.notNull(bundleContext, "BundleContext");
+ LOG.debug("Setting the OSGi ServiceRegistry");
+ OsgiServiceRegistry osgiServiceRegistry = new OsgiServiceRegistry(bundleContext);
+ CompositeRegistry compositeRegistry = new CompositeRegistry();
+ compositeRegistry.addRegistry(osgiServiceRegistry);
+ compositeRegistry.addRegistry(context.getRegistry());
+ context.setRegistry(compositeRegistry);
+ }
+
protected void addOsgiAnnotationTypeConverterLoader(DefaultCamelContext context, BundleContext bundleContext) {
LOG.debug("Using OsgiAnnotationTypeConverterLoader");
DefaultTypeConverter typeConverter = (DefaultTypeConverter) context.getTypeConverter();
diff --git a/components/camel-osgi/src/main/java/org/apache/camel/osgi/CamelContextFactoryBean.java b/components/camel-osgi/src/main/java/org/apache/camel/osgi/CamelContextFactoryBean.java
index e0dcdeed485e7..4a2f20842a115 100644
--- a/components/camel-osgi/src/main/java/org/apache/camel/osgi/CamelContextFactoryBean.java
+++ b/components/camel-osgi/src/main/java/org/apache/camel/osgi/CamelContextFactoryBean.java
@@ -22,10 +22,13 @@
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
+import org.apache.camel.CamelContext;
+import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.impl.converter.AnnotationTypeConverterLoader;
import org.apache.camel.impl.converter.DefaultTypeConverter;
import org.apache.camel.impl.converter.TypeConverterLoader;
import org.apache.camel.spring.SpringCamelContext;
+import org.apache.camel.util.ObjectHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.framework.BundleContext;
@@ -56,6 +59,7 @@ protected SpringCamelContext createContext() {
if (LOG.isDebugEnabled()) {
LOG.debug("Using OSGI resolvers");
}
+ updateRegistry(context);
LOG.debug("Using OsgiFactoryFinderResolver");
context.setFactoryFinderResolver(new OsgiFactoryFinderResolver());
LOG.debug("Using OsgiPackageScanClassResolver");
@@ -73,6 +77,16 @@ protected SpringCamelContext createContext() {
return context;
}
+ protected void updateRegistry(DefaultCamelContext context) {
+ ObjectHelper.notNull(bundleContext, "BundleContext");
+ LOG.debug("Setting the OSGi ServiceRegistry");
+ OsgiServiceRegistry osgiServiceRegistry = new OsgiServiceRegistry(bundleContext);
+ CompositeRegistry compositeRegistry = new CompositeRegistry();
+ compositeRegistry.addRegistry(osgiServiceRegistry);
+ compositeRegistry.addRegistry(context.getRegistry());
+ context.setRegistry(compositeRegistry);
+ }
+
protected void addOsgiAnnotationTypeConverterLoader(SpringCamelContext context) {
LOG.debug("Using OsgiAnnotationTypeConverterLoader");
diff --git a/components/camel-osgi/src/main/java/org/apache/camel/osgi/CompositeRegistry.java b/components/camel-osgi/src/main/java/org/apache/camel/osgi/CompositeRegistry.java
new file mode 100644
index 0000000000000..8f2f8dcdcd992
--- /dev/null
+++ b/components/camel-osgi/src/main/java/org/apache/camel/osgi/CompositeRegistry.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.osgi;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.camel.spi.Registry;
+
+/**
+ * This registry will look up the object with the sequence of the registry list untill it find the Object.
+ */
+public class CompositeRegistry implements Registry {
+ private List<Registry> registryList;
+
+ public CompositeRegistry() {
+ registryList = new ArrayList<Registry>();
+ }
+
+ public CompositeRegistry(List<Registry> registries) {
+ registryList = registries;
+ }
+
+ public void addRegistry(Registry registry) {
+ registryList.add(registry);
+ }
+
+ public <T> T lookup(String name, Class<T> type) {
+ T answer = null;
+ for (Registry registry : registryList) {
+ answer = registry.lookup(name, type);
+ if (answer != null) {
+ break;
+ }
+ }
+ return answer;
+ }
+
+ public Object lookup(String name) {
+ Object answer = null;
+ for (Registry registry : registryList) {
+ answer = registry.lookup(name);
+ if (answer != null) {
+ break;
+ }
+ }
+ return answer;
+ }
+
+ @SuppressWarnings("unchecked")
+ public <T> Map<String, T> lookupByType(Class<T> type) {
+ Map<String, T> answer = Collections.EMPTY_MAP;
+ for (Registry registry : registryList) {
+ answer = registry.lookupByType(type);
+ if (answer != Collections.EMPTY_MAP) {
+ break;
+ }
+ }
+ return answer;
+ }
+
+}
diff --git a/components/camel-osgi/src/main/java/org/apache/camel/osgi/OsgiServiceRegistry.java b/components/camel-osgi/src/main/java/org/apache/camel/osgi/OsgiServiceRegistry.java
new file mode 100644
index 0000000000000..09ff58c19aa61
--- /dev/null
+++ b/components/camel-osgi/src/main/java/org/apache/camel/osgi/OsgiServiceRegistry.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.osgi;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.camel.spi.Registry;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.ServiceReference;
+import org.springframework.osgi.context.BundleContextAware;
+
+/**
+ * The OsgiServiceRegistry support to get the service object from the bundle context
+ */
+public class OsgiServiceRegistry implements Registry {
+ private BundleContext bundleContext;
+ private Map<String, Object> serviceCacheMap = new ConcurrentHashMap<String, Object>();
+
+ public OsgiServiceRegistry(BundleContext bc) {
+ bundleContext = bc;
+ }
+
+ public <T> T lookup(String name, Class<T> type) {
+ Object service = lookup(name);
+ return type.cast(service);
+ }
+
+ public Object lookup(String name) {
+ Object service = serviceCacheMap.get(name);
+ if (service == null) {
+ ServiceReference sr = bundleContext.getServiceReference(name);
+ if (sr != null) {
+ // TODO need to keep the track of Service
+ // and call ungetService when the camel context is closed
+ service = bundleContext.getService(sr);
+ if (service != null) {
+ serviceCacheMap.put(name, service);
+ }
+ }
+ }
+ return service;
+ }
+
+ @SuppressWarnings("unchecked")
+ public <T> Map<String, T> lookupByType(Class<T> type) {
+ // not implemented so we return an empty map
+ return Collections.EMPTY_MAP;
+ }
+
+}
diff --git a/components/camel-osgi/src/test/java/org/apache/camel/osgi/CamelContextFactoryTest.java b/components/camel-osgi/src/test/java/org/apache/camel/osgi/CamelContextFactoryTest.java
new file mode 100644
index 0000000000000..26c4b0db28ea5
--- /dev/null
+++ b/components/camel-osgi/src/test/java/org/apache/camel/osgi/CamelContextFactoryTest.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.osgi;
+
+import org.apache.camel.impl.DefaultCamelContext;
+import org.apache.camel.osgi.test.MyService;
+import org.junit.Test;
+
+public class CamelContextFactoryTest extends CamelOsgiTestSupport {
+ @Test
+ public void osigServiceRegistryTest() {
+ CamelContextFactory factory = new CamelContextFactory();
+ factory.setBundleContext(getBundleContext());
+ DefaultCamelContext context = factory.createContext();
+ MyService myService = context.getRegistry().lookup(MyService.class.getName(), MyService.class);
+ assertNotNull("MyService should not be null", myService);
+ }
+
+}
diff --git a/components/camel-osgi/src/test/java/org/apache/camel/osgi/CamelMockBundleContext.java b/components/camel-osgi/src/test/java/org/apache/camel/osgi/CamelMockBundleContext.java
new file mode 100644
index 0000000000000..3afd9b413ef4e
--- /dev/null
+++ b/components/camel-osgi/src/test/java/org/apache/camel/osgi/CamelMockBundleContext.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.camel.osgi;
+
+import org.apache.camel.osgi.test.MyService;
+import org.osgi.framework.Constants;
+import org.osgi.framework.ServiceReference;
+import org.springframework.osgi.mock.MockBundleContext;
+
+/**
+ *
+ */
+public class CamelMockBundleContext extends MockBundleContext {
+
+ public Object getService(ServiceReference reference) {
+ String[] classNames = (String[]) reference.getProperty(Constants.OBJECTCLASS);
+ System.out.println("The class name is " + classNames[0]);
+ if (classNames[0].equals("org.apache.camel.osgi.test.MyService")) {
+ return new MyService();
+ } else {
+ return null;
+ }
+ }
+
+}
diff --git a/components/camel-osgi/src/test/java/org/apache/camel/osgi/CamelOsgiTestSupport.java b/components/camel-osgi/src/test/java/org/apache/camel/osgi/CamelOsgiTestSupport.java
index 77984c1353f6f..26079533d2a5b 100644
--- a/components/camel-osgi/src/test/java/org/apache/camel/osgi/CamelOsgiTestSupport.java
+++ b/components/camel-osgi/src/test/java/org/apache/camel/osgi/CamelOsgiTestSupport.java
@@ -16,7 +16,6 @@
*/
package org.apache.camel.osgi;
-import junit.framework.TestCase;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@@ -26,7 +25,7 @@
public class CamelOsgiTestSupport extends Assert {
private Activator testActivator;
- private MockBundleContext bundleContext = new MockBundleContext();
+ private MockBundleContext bundleContext = new CamelMockBundleContext();
private OsgiPackageScanClassResolver resolver = new OsgiPackageScanClassResolver(bundleContext);
private MockBundle bundle = new CamelMockBundle();
diff --git a/components/camel-osgi/src/test/java/org/apache/camel/osgi/test/MyService.java b/components/camel-osgi/src/test/java/org/apache/camel/osgi/test/MyService.java
new file mode 100644
index 0000000000000..65e246f9de205
--- /dev/null
+++ b/components/camel-osgi/src/test/java/org/apache/camel/osgi/test/MyService.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.osgi.test;
+
+public class MyService {
+ public String sayHi() {
+ return "Hello";
+ }
+
+}
|
61401651a988791c5cd106f517ceb42ab2ef13e7
|
restlet-framework-java
|
- Improved exception handling on createResource().--
|
p
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet/src/org/restlet/Finder.java b/modules/org.restlet/src/org/restlet/Finder.java
index bdf8e0d8e8..28ce55dca3 100644
--- a/modules/org.restlet/src/org/restlet/Finder.java
+++ b/modules/org.restlet/src/org/restlet/Finder.java
@@ -125,7 +125,9 @@ private boolean allowMethod(Method method, Resource target) {
/**
* Creates a new instance of the resource class designated by the
- * "targetClass" property.
+ * "targetClass" property. Note that Error and RuntimeException thrown by
+ * Resource constructors are rethrown by this method. Other exception are
+ * caught and logged.
*
* @param request
* The request to handle.
@@ -158,6 +160,8 @@ public Resource createResource(Request request, Response response) {
} catch (InvocationTargetException e) {
if (e.getCause() instanceof Error) {
throw (Error) e.getCause();
+ } else if (e.getCause() instanceof RuntimeException) {
+ throw (RuntimeException) e.getCause();
} else {
getLogger()
.log(
|
100f571c9a2835d5a30a55374b9be74c147e031f
|
ReactiveX-RxJava
|
forEach with Action1 but not Observer--I re-read the MSDN docs and found the previous implementation wasn't complying with the contract.--http://msdn.microsoft.com/en-us/library/hh211815(v=vs.103).aspx--I believe this now does.-
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/language-adaptors/rxjava-groovy/src/test/groovy/rx/lang/groovy/ObservableTests.groovy b/language-adaptors/rxjava-groovy/src/test/groovy/rx/lang/groovy/ObservableTests.groovy
index 7f62af5724..967c096691 100644
--- a/language-adaptors/rxjava-groovy/src/test/groovy/rx/lang/groovy/ObservableTests.groovy
+++ b/language-adaptors/rxjava-groovy/src/test/groovy/rx/lang/groovy/ObservableTests.groovy
@@ -22,6 +22,7 @@ import java.util.Arrays;
import org.junit.Before;
import org.junit.Test;
+import static org.junit.Assert.*;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
@@ -222,33 +223,17 @@ def class ObservableTests {
verify(a, times(1)).received(3);
}
- @Test
- public void testForEachWithComplete() {
- Observable.create(new AsyncObservable()).forEach({ result -> a.received(result)}, {}, {a.received('done')});
- verify(a, times(1)).received(1);
- verify(a, times(1)).received(2);
- verify(a, times(1)).received(3);
- verify(a, times(1)).received("done");
- }
-
@Test
public void testForEachWithError() {
- Observable.create(new AsyncObservable()).forEach({ result -> throw new RuntimeException('err')}, {err -> a.received(err.message)});
- verify(a, times(0)).received(1);
- verify(a, times(0)).received(2);
- verify(a, times(0)).received(3);
- verify(a, times(1)).received("err");
- verify(a, times(0)).received("done");
- }
-
- @Test
- public void testForEachWithCompleteAndError() {
- Observable.create(new AsyncObservable()).forEach({ result -> throw new RuntimeException('err')}, {err -> a.received(err.message)}, {a.received('done')},);
+ try {
+ Observable.create(new AsyncObservable()).forEach({ result -> throw new RuntimeException('err')});
+ fail("we expect an exception to be thrown");
+ }catch(Exception e) {
+
+ }
verify(a, times(0)).received(1);
verify(a, times(0)).received(2);
verify(a, times(0)).received(3);
- verify(a, times(1)).received("err");
- verify(a, times(0)).received("done");
}
def class AsyncObservable implements Func1<Observer<Integer>, Subscription> {
diff --git a/rxjava-core/src/main/java/rx/Observable.java b/rxjava-core/src/main/java/rx/Observable.java
index 86e85be0f5..ba9cc6827b 100644
--- a/rxjava-core/src/main/java/rx/Observable.java
+++ b/rxjava-core/src/main/java/rx/Observable.java
@@ -26,6 +26,7 @@
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicReference;
import org.junit.Before;
import org.junit.Test;
@@ -334,33 +335,39 @@ public void onNext(T args) {
}
/**
- * Blocking version of {@link #subscribe(Observer)}.
+ * Invokes an action for each element in the observable sequence, and blocks until the sequence is terminated.
* <p>
* NOTE: This will block even if the Observable is asynchronous.
+ * <p>
+ * This is similar to {@link #subscribe(Observer)} but blocks. Because it blocks it does not need the {@link Observer#onCompleted()} or {@link Observer#onError(Exception)} methods.
*
- * @param observer
+ * @param onNext
+ * {@link Action1}
+ * @throws RuntimeException
+ * if error occurs
*/
- public void forEach(final Observer<T> observer) {
+ public void forEach(final Action1<T> onNext) {
final CountDownLatch latch = new CountDownLatch(1);
+ final AtomicReference<Exception> exceptionFromOnError = new AtomicReference<Exception>();
+
subscribe(new Observer<T>() {
public void onCompleted() {
- try {
- observer.onCompleted();
- } finally {
- latch.countDown();
- }
+ latch.countDown();
}
public void onError(Exception e) {
- try {
- observer.onError(e);
- } finally {
- latch.countDown();
- }
+ /*
+ * If we receive an onError event we set the reference on the outer thread
+ * so we can git it and throw after the latch.await().
+ *
+ * We do this instead of throwing directly since this may be on a different thread and the latch is still waiting.
+ */
+ exceptionFromOnError.set(e);
+ latch.countDown();
}
public void onNext(T args) {
- observer.onNext(args);
+ onNext.call(args);
}
});
// block until the subscription completes and then return
@@ -369,46 +376,21 @@ public void onNext(T args) {
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while waiting for subscription to complete.", e);
}
- }
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- public void forEach(final Map<String, Object> callbacks) {
- // lookup and memoize onNext
- Object _onNext = callbacks.get("onNext");
- if (_onNext == null) {
- throw new RuntimeException("onNext must be implemented");
- }
- final FuncN onNext = Functions.from(_onNext);
-
- forEach(new Observer() {
-
- public void onCompleted() {
- Object onComplete = callbacks.get("onCompleted");
- if (onComplete != null) {
- Functions.from(onComplete).call();
- }
- }
-
- public void onError(Exception e) {
- handleError(e);
- Object onError = callbacks.get("onError");
- if (onError != null) {
- Functions.from(onError).call(e);
- }
- }
- public void onNext(Object args) {
- onNext.call(args);
+ if (exceptionFromOnError.get() != null) {
+ if (exceptionFromOnError.get() instanceof RuntimeException) {
+ throw (RuntimeException) exceptionFromOnError.get();
+ } else {
+ throw new RuntimeException(exceptionFromOnError.get());
}
-
- });
+ }
}
@SuppressWarnings({ "rawtypes", "unchecked" })
public void forEach(final Object o) {
- if (o instanceof Observer) {
- // in case a dynamic language is not correctly handling the overloaded methods and we receive an Observer just forward to the correct method.
- forEach((Observer) o);
+ if (o instanceof Action1) {
+ // in case a dynamic language is not correctly handling the overloaded methods and we receive an Action1 just forward to the correct method.
+ forEach((Action1) o);
}
// lookup and memoize onNext
@@ -417,156 +399,15 @@ public void forEach(final Object o) {
}
final FuncN onNext = Functions.from(o);
- forEach(new Observer() {
-
- public void onCompleted() {
- // do nothing
- }
-
- public void onError(Exception e) {
- handleError(e);
- // no callback defined
- }
-
- public void onNext(Object args) {
- onNext.call(args);
- }
-
- });
- }
-
- public void forEach(final Action1<T> onNext) {
-
- forEach(new Observer<T>() {
-
- public void onCompleted() {
- // do nothing
- }
-
- public void onError(Exception e) {
- handleError(e);
- // no callback defined
- }
-
- public void onNext(T args) {
- if (onNext == null) {
- throw new RuntimeException("onNext must be implemented");
- }
- onNext.call(args);
- }
-
- });
- }
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- public void forEach(final Object onNext, final Object onError) {
- // lookup and memoize onNext
- if (onNext == null) {
- throw new RuntimeException("onNext must be implemented");
- }
- final FuncN onNextFunction = Functions.from(onNext);
-
- forEach(new Observer() {
+ forEach(new Action1() {
- public void onCompleted() {
- // do nothing
- }
-
- public void onError(Exception e) {
- handleError(e);
- if (onError != null) {
- Functions.from(onError).call(e);
- }
- }
-
- public void onNext(Object args) {
- onNextFunction.call(args);
- }
-
- });
- }
-
- public void forEach(final Action1<T> onNext, final Action1<Exception> onError) {
-
- forEach(new Observer<T>() {
-
- public void onCompleted() {
- // do nothing
- }
-
- public void onError(Exception e) {
- handleError(e);
- if (onError != null) {
- onError.call(e);
- }
- }
-
- public void onNext(T args) {
- if (onNext == null) {
- throw new RuntimeException("onNext must be implemented");
- }
+ public void call(Object args) {
onNext.call(args);
}
});
}
- @SuppressWarnings({ "rawtypes", "unchecked" })
- public void forEach(final Object onNext, final Object onError, final Object onComplete) {
- // lookup and memoize onNext
- if (onNext == null) {
- throw new RuntimeException("onNext must be implemented");
- }
- final FuncN onNextFunction = Functions.from(onNext);
-
- forEach(new Observer() {
-
- public void onCompleted() {
- if (onComplete != null) {
- Functions.from(onComplete).call();
- }
- }
-
- public void onError(Exception e) {
- handleError(e);
- if (onError != null) {
- Functions.from(onError).call(e);
- }
- }
-
- public void onNext(Object args) {
- onNextFunction.call(args);
- }
-
- });
- }
-
- public void forEach(final Action1<T> onNext, final Action1<Exception> onError, final Action0 onComplete) {
-
- forEach(new Observer<T>() {
-
- public void onCompleted() {
- onComplete.call();
- }
-
- public void onError(Exception e) {
- handleError(e);
- if (onError != null) {
- onError.call(e);
- }
- }
-
- public void onNext(T args) {
- if (onNext == null) {
- throw new RuntimeException("onNext must be implemented");
- }
- onNext.call(args);
- }
-
- });
- }
-
-
/**
* Allow the {@link RxJavaErrorHandler} to receive the exception from onError.
*
|
4fb6823b3fcc1105589f30a9376fa533fb58a39d
|
drools
|
Removed unncessary else if check; replaced with- else--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@26039 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
p
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/reteoo/ObjectSinkNodeList.java b/drools-core/src/main/java/org/drools/reteoo/ObjectSinkNodeList.java
index 9adaf99483d..3a06acce6b0 100644
--- a/drools-core/src/main/java/org/drools/reteoo/ObjectSinkNodeList.java
+++ b/drools-core/src/main/java/org/drools/reteoo/ObjectSinkNodeList.java
@@ -86,7 +86,7 @@ public void writeExternal(ObjectOutput out) throws IOException {
public void add(final ObjectSinkNode node) {
if ( this.firstNode == null ) {
this.firstNode = node;
- this.lastNode = node;;
+ this.lastNode = node;
} else {
this.lastNode.setNextObjectSinkNode( node );
node.setPreviousObjectSinkNode( this.lastNode );
@@ -114,7 +114,7 @@ public void remove(final ObjectSinkNode node) {
} else {
if ( this.firstNode == node ) {
removeFirst();
- } else if ( this.lastNode == node ) {
+ } else {
removeLast();
}
}
|
55eb41292d2989225b4c4c012a0b34f2edfd54b5
|
camel
|
Fix test error reported by TeamCity--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@739402 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/components/camel-rss/src/test/java/org/apache/camel/dataformat/rss/RssDataFormatTest.java b/components/camel-rss/src/test/java/org/apache/camel/dataformat/rss/RssDataFormatTest.java
index f945699c3f6ee..15b5c9d1fb6fd 100644
--- a/components/camel-rss/src/test/java/org/apache/camel/dataformat/rss/RssDataFormatTest.java
+++ b/components/camel-rss/src/test/java/org/apache/camel/dataformat/rss/RssDataFormatTest.java
@@ -59,9 +59,9 @@ protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
public void configure() throws Exception {
// START SNIPPET: ex
- from("rss:file:src/test/data/rss20.xml?splitEntries=false&consumer.delay=100").marshal().rss().to("mock:marshal");
+ from("rss:file:src/test/data/rss20.xml?splitEntries=false&consumer.delay=1000").marshal().rss().to("mock:marshal");
// END SNIPPET: ex
- from("rss:file:src/test/data/rss20.xml?splitEntries=false&consumer.delay=100").marshal().rss().unmarshal().rss().to("mock:unmarshal");
+ from("rss:file:src/test/data/rss20.xml?splitEntries=false&consumer.delay=1000").marshal().rss().unmarshal().rss().to("mock:unmarshal");
}
};
}
|
3b73c810de961f01ab2dd27710c7e17a0490e208
|
orientdb
|
Started support for server-side triggers--
|
a
|
https://github.com/orientechnologies/orientdb
|
diff --git a/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java b/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java
index ffe771f4c58..ab7c460ee98 100644
--- a/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java
+++ b/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java
@@ -1247,12 +1247,11 @@ private ORecordInternal<?> readRecordFromNetwork(final ODatabaseRecord iDatabase
final ORecordInternal<?> record = ORecordFactory.newInstance(network.readByte());
if (record instanceof ORecordSchemaAware<?>)
- ((ORecordSchemaAware<?>) record).fill(iDatabase, classId, network.readShort(), network.readLong(), network.readInt());
+ ((ORecordSchemaAware<?>) record).fill(iDatabase, classId, network.readShort(), network.readLong(), network.readInt(),
+ network.readBytes());
else
// DISCARD CLASS ID
- record.fill(iDatabase, network.readShort(), network.readLong(), network.readInt());
-
- record.fromStream(network.readBytes());
+ record.fill(iDatabase, network.readShort(), network.readLong(), network.readInt(), network.readBytes());
return record;
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java
index 1de1700b8f4..f6fb23cf754 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java
@@ -392,12 +392,12 @@ public <RET extends ORecordInternal<?>> RET executeReadRecord(final int iCluster
if (currDb == null)
currDb = (ODatabaseRecord) databaseOwner;
- iRecord.fill(currDb, iClusterId, iPosition, recordBuffer.version);
+ iRecord.fill(currDb, iClusterId, iPosition, recordBuffer.version, recordBuffer.buffer);
iRecord.fromStream(recordBuffer.buffer);
iRecord.setStatus(STATUS.LOADED);
callbackHooks(TYPE.AFTER_READ, iRecord);
-
+
if (!iIgnoreCache) {
getCache().pushRecord(iRecord);
}
@@ -482,7 +482,7 @@ public void executeSaveRecord(final ORecordInternal<?> iRecord, final String iCl
if (isNew) {
// UPDATE INFORMATION: CLUSTER ID+POSITION
- iRecord.fill(iRecord.getDatabase(), clusterId, result, 0);
+ iRecord.fill(iRecord.getDatabase(), clusterId, result, 0, stream);
iRecord.setStatus(STATUS.LOADED);
if (stream != null && stream.length > 0)
callbackHooks(TYPE.AFTER_CREATE, iRecord);
@@ -491,7 +491,7 @@ public void executeSaveRecord(final ORecordInternal<?> iRecord, final String iCl
iRecord.onAfterIdentityChanged(iRecord);
} else {
// UPDATE INFORMATION: VERSION
- iRecord.fill(iRecord.getDatabase(), clusterId, rid.getClusterPosition(), (int) result);
+ iRecord.fill(iRecord.getDatabase(), clusterId, rid.getClusterPosition(), (int) result, stream);
iRecord.setStatus(STATUS.LOADED);
if (stream != null && stream.length > 0)
callbackHooks(TYPE.AFTER_UPDATE, iRecord);
diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java
index 63e751e6f35..0d1a92b3b37 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java
@@ -50,10 +50,14 @@ public ORecordAbstract(final ODatabaseRecord iDatabase, final byte[] iSource) {
unsetDirty();
}
- public ORecordAbstract<?> fill(final ODatabaseRecord iDatabase, final int iClusterId, final long iPosition, final int iVersion) {
+ public ORecordAbstract<?> fill(final ODatabaseRecord iDatabase, final int iClusterId, final long iPosition, final int iVersion,
+ final byte[] iBuffer) {
_database = iDatabase;
setIdentity(iClusterId, iPosition);
_version = iVersion;
+ _status = STATUS.LOADED;
+ _source = iBuffer;
+
return this;
}
@@ -242,7 +246,7 @@ public ORecordInternal<T> reload(final String iFetchPlan) {
_database.reload(this, iFetchPlan);
// GET CONTENT
- //fromStream(toStream());
+ // fromStream(toStream());
return this;
} catch (Exception e) {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordInternal.java b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordInternal.java
index 8d090da853d..70f869c3c57 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordInternal.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordInternal.java
@@ -23,7 +23,7 @@
* Generic record representation. The object can be reused across call to the database.
*/
public interface ORecordInternal<T> extends ORecord<T>, OSerializableStream {
- public ORecordAbstract<?> fill(ODatabaseRecord iDatabase, int iClusterId, long iPosition, int iVersion);
+ public ORecordAbstract<?> fill(ODatabaseRecord iDatabase, int iClusterId, long iPosition, int iVersion, byte[] iBuffer);
public ORecordAbstract<?> setIdentity(int iClusterId, long iClusterPosition);
diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAware.java b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAware.java
index baf0b555589..02f13d261f0 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAware.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAware.java
@@ -51,5 +51,6 @@ public interface ORecordSchemaAware<T> extends ORecordInternal<T> {
public void validate() throws OValidationException;
- public ORecordSchemaAware<T> fill(ODatabaseRecord iDatabase, int iClassId, int iClusterId, long iPosition, int iVersion);
+ public ORecordSchemaAware<T> fill(ODatabaseRecord iDatabase, int iClassId, int iClusterId, long iPosition, int iVersion,
+ byte[] iBuffer);
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAwareAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAwareAbstract.java
index a3501a6b7d2..01802a8bbc0 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAwareAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAwareAbstract.java
@@ -40,8 +40,8 @@ public ORecordSchemaAwareAbstract(final ODatabaseRecord iDatabase) {
}
public ORecordSchemaAwareAbstract<T> fill(final ODatabaseRecord iDatabase, final int iClassId, final int iClusterId,
- final long iPosition, final int iVersion) {
- super.fill(iDatabase, iClusterId, iPosition, iVersion);
+ final long iPosition, final int iVersion, final byte[] iBuffer) {
+ super.fill(iDatabase, iClusterId, iPosition, iVersion, iBuffer);
setClass(_database.getMetadata().getSchema().getClassById(iClassId));
return this;
}
diff --git a/server/src/main/java/com/orientechnologies/orient/server/tx/OTransactionRecordProxy.java b/server/src/main/java/com/orientechnologies/orient/server/tx/OTransactionRecordProxy.java
index 6bc891752eb..5e2855ec9bf 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/tx/OTransactionRecordProxy.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/tx/OTransactionRecordProxy.java
@@ -167,7 +167,8 @@ public void setStatus(com.orientechnologies.orient.core.record.ORecord.STATUS iS
}
@Override
- public ORecordAbstract<?> fill(final ODatabaseRecord iDatabase, final int iClusterId, final long iPosition, final int iVersion) {
+ public ORecordAbstract<?> fill(final ODatabaseRecord iDatabase, final int iClusterId, final long iPosition, final int iVersion,
+ final byte[] iBuffer) {
return null;
}
|
903ee0d0a6f1254e054a8413575b059f77d3899d
|
restlet-framework-java
|
- Fixed unit test due to random Java method- introspection order--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet.test/src/org/restlet/test/resource/AnnotatedResource8TestCase.java b/modules/org.restlet.test/src/org/restlet/test/resource/AnnotatedResource8TestCase.java
index 375d0fcc70..2a545cbe5f 100644
--- a/modules/org.restlet.test/src/org/restlet/test/resource/AnnotatedResource8TestCase.java
+++ b/modules/org.restlet.test/src/org/restlet/test/resource/AnnotatedResource8TestCase.java
@@ -35,7 +35,6 @@
import java.io.IOException;
-import org.restlet.data.Form;
import org.restlet.data.MediaType;
import org.restlet.representation.Representation;
import org.restlet.representation.StringRepresentation;
@@ -69,40 +68,43 @@ protected void tearDown() throws Exception {
}
public void testPost() throws IOException, ResourceException {
- Representation input = new StringRepresentation("<root/>",
+ Representation input = new StringRepresentation("root",
MediaType.APPLICATION_XML);
Representation result = clientResource.post(input,
MediaType.APPLICATION_XML);
assertNotNull(result);
- assertEquals("<root/>1", result.getText());
+ assertEquals("root1", result.getText());
assertEquals(MediaType.APPLICATION_XML, result.getMediaType());
- input = new StringRepresentation("<root/>", MediaType.APPLICATION_XML);
+ input = new StringRepresentation("root", MediaType.APPLICATION_XML);
result = clientResource.post(input, MediaType.APPLICATION_JSON);
assertNotNull(result);
- assertEquals("<root/>2", result.getText());
+ assertEquals("root1", result.getText());
assertEquals(MediaType.APPLICATION_JSON, result.getMediaType());
- input = new StringRepresentation("root=true",
- MediaType.APPLICATION_WWW_FORM);
+ input = new StringRepresentation("root", MediaType.APPLICATION_JSON);
result = clientResource.post(input, MediaType.APPLICATION_JSON);
assertNotNull(result);
- assertEquals("root=true3", result.getText());
+ assertEquals("root1", result.getText());
assertEquals(MediaType.APPLICATION_JSON, result.getMediaType());
- Form inputForm = new Form();
- inputForm.add("root", "true");
- result = clientResource.post(inputForm, MediaType.APPLICATION_JSON);
+ input = new StringRepresentation("root", MediaType.APPLICATION_JSON);
+ result = clientResource.post(input, MediaType.APPLICATION_XML);
assertNotNull(result);
- assertEquals("root=true3", result.getText());
- assertEquals(MediaType.APPLICATION_JSON, result.getMediaType());
+ assertEquals("root1", result.getText());
+ assertEquals(MediaType.APPLICATION_XML, result.getMediaType());
- input = new StringRepresentation("[root]", MediaType.APPLICATION_JSON);
- result = clientResource.post(input, MediaType.APPLICATION_JSON);
+ input = new StringRepresentation("root", MediaType.APPLICATION_WWW_FORM);
+ result = clientResource.post(input, MediaType.APPLICATION_WWW_FORM);
assertNotNull(result);
- assertEquals("[root]2", result.getText());
- assertEquals(MediaType.APPLICATION_JSON, result.getMediaType());
+ assertEquals("root2", result.getText());
+ assertEquals(MediaType.APPLICATION_WWW_FORM, result.getMediaType());
+ input = new StringRepresentation("root", MediaType.APPLICATION_WWW_FORM);
+ result = clientResource.post(input, MediaType.TEXT_HTML);
+ assertNotNull(result);
+ assertEquals("root2", result.getText());
+ assertEquals(MediaType.TEXT_HTML, result.getMediaType());
}
}
diff --git a/modules/org.restlet.test/src/org/restlet/test/resource/MyResource8.java b/modules/org.restlet.test/src/org/restlet/test/resource/MyResource8.java
index 1edfae72b7..309d1c617d 100644
--- a/modules/org.restlet.test/src/org/restlet/test/resource/MyResource8.java
+++ b/modules/org.restlet.test/src/org/restlet/test/resource/MyResource8.java
@@ -38,19 +38,14 @@
public class MyResource8 extends ServerResource {
- @Post("xml|json:xml")
- public String storeForm(String entity) {
- return entity + "1";
- }
-
- @Post("xml|json:json|html")
+ @Post("xml|json:xml|json")
public String store1(String entity) {
- return entity + "2";
+ return entity + "1";
}
- @Post("form|json:json|html")
+ @Post("form|html:form|html")
public String store2(String entity) {
- return entity + "3";
+ return entity + "2";
}
}
|
ab4b279e6a966d5410f581dc51951046a283a70c
|
restlet-framework-java
|
Fixed test case.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet.test/src/org/restlet/test/engine/ChunkedEncodingPutTestCase.java b/modules/org.restlet.test/src/org/restlet/test/engine/ChunkedEncodingPutTestCase.java
index 6380fc699b..619472cb84 100644
--- a/modules/org.restlet.test/src/org/restlet/test/engine/ChunkedEncodingPutTestCase.java
+++ b/modules/org.restlet.test/src/org/restlet/test/engine/ChunkedEncodingPutTestCase.java
@@ -30,8 +30,6 @@
package org.restlet.test.engine;
-import java.io.IOException;
-
import org.restlet.Application;
import org.restlet.Client;
import org.restlet.Component;
@@ -69,13 +67,7 @@ public PutTestResource() {
@Override
public Representation put(Representation entity) {
- String str = null;
- try {
- str = entity.getText();
- } catch (IOException e) {
- e.printStackTrace();
- }
- return new StringRepresentation(str, MediaType.TEXT_PLAIN);
+ return entity;
}
}
|
5f9b4443194d3aa3948d76956897c0a1d918d546
|
spring-framework
|
bean properties of type enum array/collection can- be populated with comma-separated String (SPR-6547)--
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.beans/src/main/java/org/springframework/beans/TypeConverterDelegate.java b/org.springframework.beans/src/main/java/org/springframework/beans/TypeConverterDelegate.java
index 85100b836c2d..049a0694385f 100644
--- a/org.springframework.beans/src/main/java/org/springframework/beans/TypeConverterDelegate.java
+++ b/org.springframework.beans/src/main/java/org/springframework/beans/TypeConverterDelegate.java
@@ -202,6 +202,13 @@ private <T> T convertIfNecessary(String propertyName, Object oldValue, Object ne
// Value not of required type?
if (editor != null || (requiredType != null && !ClassUtils.isAssignableValue(requiredType, convertedValue))) {
+ if (requiredType != null && Collection.class.isAssignableFrom(requiredType) &&
+ convertedValue instanceof String && typeDescriptor.getMethodParameter() != null) {
+ Class elementType = GenericCollectionTypeResolver.getCollectionParameterType(typeDescriptor.getMethodParameter());
+ if (elementType != null && Enum.class.isAssignableFrom(elementType)) {
+ convertedValue = StringUtils.commaDelimitedListToStringArray((String) convertedValue);
+ }
+ }
if (editor == null) {
editor = findDefaultEditor(requiredType, typeDescriptor);
}
@@ -214,6 +221,9 @@ private <T> T convertIfNecessary(String propertyName, Object oldValue, Object ne
if (convertedValue != null) {
if (requiredType.isArray()) {
// Array required -> apply appropriate conversion of elements.
+ if (convertedValue instanceof String && Enum.class.isAssignableFrom(requiredType.getComponentType())) {
+ convertedValue = StringUtils.commaDelimitedListToStringArray((String) convertedValue);
+ }
return (T) convertToTypedArray(convertedValue, propertyName, requiredType.getComponentType());
}
else if (convertedValue instanceof Collection) {
diff --git a/org.springframework.beans/src/test/java/org/springframework/beans/BeanWrapperEnumTests.java b/org.springframework.beans/src/test/java/org/springframework/beans/BeanWrapperEnumTests.java
index 43182fe69acd..d41aa7a215e6 100644
--- a/org.springframework.beans/src/test/java/org/springframework/beans/BeanWrapperEnumTests.java
+++ b/org.springframework.beans/src/test/java/org/springframework/beans/BeanWrapperEnumTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2008 the original author or authors.
+ * Copyright 2002-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,9 +17,7 @@
package org.springframework.beans;
import static org.junit.Assert.*;
-
import org.junit.Test;
-
import test.beans.CustomEnum;
import test.beans.GenericBean;
@@ -53,4 +51,62 @@ public void testCustomEnumWithEmptyString() {
assertEquals(null, gb.getCustomEnum());
}
+ @Test
+ public void testCustomEnumArrayWithSingleValue() {
+ GenericBean<?> gb = new GenericBean<Object>();
+ BeanWrapper bw = new BeanWrapperImpl(gb);
+ bw.setPropertyValue("customEnumArray", "VALUE_1");
+ assertEquals(1, gb.getCustomEnumArray().length);
+ assertEquals(CustomEnum.VALUE_1, gb.getCustomEnumArray()[0]);
+ }
+
+ @Test
+ public void testCustomEnumArrayWithMultipleValues() {
+ GenericBean<?> gb = new GenericBean<Object>();
+ BeanWrapper bw = new BeanWrapperImpl(gb);
+ bw.setPropertyValue("customEnumArray", new String[] {"VALUE_1", "VALUE_2"});
+ assertEquals(2, gb.getCustomEnumArray().length);
+ assertEquals(CustomEnum.VALUE_1, gb.getCustomEnumArray()[0]);
+ assertEquals(CustomEnum.VALUE_2, gb.getCustomEnumArray()[1]);
+ }
+
+ @Test
+ public void testCustomEnumArrayWithMultipleValuesAsCsv() {
+ GenericBean<?> gb = new GenericBean<Object>();
+ BeanWrapper bw = new BeanWrapperImpl(gb);
+ bw.setPropertyValue("customEnumArray", "VALUE_1,VALUE_2");
+ assertEquals(2, gb.getCustomEnumArray().length);
+ assertEquals(CustomEnum.VALUE_1, gb.getCustomEnumArray()[0]);
+ assertEquals(CustomEnum.VALUE_2, gb.getCustomEnumArray()[1]);
+ }
+
+ @Test
+ public void testCustomEnumSetWithSingleValue() {
+ GenericBean<?> gb = new GenericBean<Object>();
+ BeanWrapper bw = new BeanWrapperImpl(gb);
+ bw.setPropertyValue("customEnumSet", "VALUE_1");
+ assertEquals(1, gb.getCustomEnumSet().size());
+ assertTrue(gb.getCustomEnumSet().contains(CustomEnum.VALUE_1));
+ }
+
+ @Test
+ public void testCustomEnumSetWithMultipleValues() {
+ GenericBean<?> gb = new GenericBean<Object>();
+ BeanWrapper bw = new BeanWrapperImpl(gb);
+ bw.setPropertyValue("customEnumSet", new String[] {"VALUE_1", "VALUE_2"});
+ assertEquals(2, gb.getCustomEnumSet().size());
+ assertTrue(gb.getCustomEnumSet().contains(CustomEnum.VALUE_1));
+ assertTrue(gb.getCustomEnumSet().contains(CustomEnum.VALUE_2));
+ }
+
+ @Test
+ public void testCustomEnumSetWithMultipleValuesAsCsv() {
+ GenericBean<?> gb = new GenericBean<Object>();
+ BeanWrapper bw = new BeanWrapperImpl(gb);
+ bw.setPropertyValue("customEnumSet", "VALUE_1,VALUE_2");
+ assertEquals(2, gb.getCustomEnumSet().size());
+ assertTrue(gb.getCustomEnumSet().contains(CustomEnum.VALUE_1));
+ assertTrue(gb.getCustomEnumSet().contains(CustomEnum.VALUE_2));
+ }
+
}
diff --git a/org.springframework.beans/src/test/java/test/beans/GenericBean.java b/org.springframework.beans/src/test/java/test/beans/GenericBean.java
index 25f61c4aa756..acb9bdb76e5a 100644
--- a/org.springframework.beans/src/test/java/test/beans/GenericBean.java
+++ b/org.springframework.beans/src/test/java/test/beans/GenericBean.java
@@ -60,11 +60,14 @@ public class GenericBean<T> {
private CustomEnum customEnum;
+ private CustomEnum[] customEnumArray;
+
+ private Set<CustomEnum> customEnumSet;
+
private T genericProperty;
private List<T> genericListProperty;
-
public GenericBean() {
}
@@ -225,6 +228,22 @@ public void setCustomEnum(CustomEnum customEnum) {
this.customEnum = customEnum;
}
+ public CustomEnum[] getCustomEnumArray() {
+ return customEnumArray;
+ }
+
+ public void setCustomEnumArray(CustomEnum[] customEnum) {
+ this.customEnumArray = customEnum;
+ }
+
+ public Set<CustomEnum> getCustomEnumSet() {
+ return customEnumSet;
+ }
+
+ public void setCustomEnumSet(Set<CustomEnum> customEnumSet) {
+ this.customEnumSet = customEnumSet;
+ }
+
public static GenericBean createInstance(Set<Integer> integerSet) {
return new GenericBean(integerSet);
}
|
ca2f1678d75d9d42867f2124fa3e7dfcf1c367f7
|
hbase
|
HBASE-2757. Fix flaky TestFromClientSide test by- forcing region assignment--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@956716 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 6bba3feb2be6..a0cfef66001f 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -403,6 +403,7 @@ Release 0.21.0 - Unreleased
HBASE-2760 Fix MetaScanner TableNotFoundException when scanning starting at
the first row in a table.
HBASE-1025 Reconstruction log playback has no bounds on memory used
+ HBASE-2757 Fix flaky TestFromClientSide test by forcing region assignment
IMPROVEMENTS
HBASE-1760 Cleanup TODOs in HTable
diff --git a/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 74a0c63c9a96..fc7b2afa7132 100644
--- a/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -3637,7 +3637,13 @@ public void testRegionCachePreWarm() throws Exception {
// create many regions for the table.
TEST_UTIL.createMultiRegions(table, FAMILY);
-
+ // This count effectively waits until the regions have been
+ // fully assigned
+ TEST_UTIL.countRows(table);
+ table.getConnection().clearRegionCache();
+ assertEquals("Clearing cache should have 0 cached ", 0,
+ HConnectionManager.getCachedRegionCount(conf, TABLENAME));
+
// A Get is suppose to do a region lookup request
Get g = new Get(Bytes.toBytes("aaa"));
table.get(g);
|
f6990e06b4d2dbffb2758ab89ef855ba3947bcf3
|
hadoop
|
HADOOP-6375. Sync documentation for FsShell du with- its implementation. Contributed by Todd Lipcon--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@883206 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hadoop
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 529a545f2cff0..a5c7b5cbae47d 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1162,6 +1162,9 @@ Release 0.21.0 - Unreleased
HADOOP-6347. run-test-core-fault-inject runs a test case twice if
-Dtestcase is set (cos)
+ HADOOP-6375. Sync documentation for FsShell du with its implementation.
+ (Todd Lipcon via cdouglas)
+
Release 0.20.2 - Unreleased
NEW FEATURES
diff --git a/src/docs/src/documentation/content/xdocs/file_system_shell.xml b/src/docs/src/documentation/content/xdocs/file_system_shell.xml
index bbc0ddca47705..54429b0eb6ce4 100644
--- a/src/docs/src/documentation/content/xdocs/file_system_shell.xml
+++ b/src/docs/src/documentation/content/xdocs/file_system_shell.xml
@@ -191,10 +191,16 @@
<section>
<title>du</title>
<p>
- <code>Usage: hdfs dfs -du URI [URI …]</code>
+ <code>Usage: hdfs dfs -du [-s] [-h] URI [URI …]</code>
</p>
<p>
- Displays aggregate length of files contained in the directory or the length of a file in case its just a file.<br/>
+ Displays sizes of files and directories contained in the given directory or the length of a file in case its just a file.</p>
+ <p>Options:</p>
+ <ul>
+ <li>The <code>-s</code> option will result in an aggregate summary of file lengths being displayed, rather than the individual files.</li>
+ <li>The <code>-h</code> option will format file sizes in a "human-readable" fashion (e.g 64.0m instead of 67108864)</li>
+ </ul>
+ <p>
Example:<br/><code>hdfs dfs -du /user/hadoop/dir1 /user/hadoop/file1 hdfs://nn.example.com/user/hadoop/dir1</code><br/>
Exit Code:<br/><code> Returns 0 on success and -1 on error. </code><br/></p>
</section>
@@ -206,7 +212,7 @@
<code>Usage: hdfs dfs -dus <args></code>
</p>
<p>
- Displays a summary of file lengths.
+ Displays a summary of file lengths. This is an alternate form of <code>hdfs dfs -du -s</code>.
</p>
</section>
diff --git a/src/java/org/apache/hadoop/fs/FsShell.java b/src/java/org/apache/hadoop/fs/FsShell.java
index e3c895b252c4f..e5501f18825e3 100644
--- a/src/java/org/apache/hadoop/fs/FsShell.java
+++ b/src/java/org/apache/hadoop/fs/FsShell.java
@@ -1339,7 +1339,7 @@ private void printHelp(String cmd) {
String summary = "hadoop fs is the command to execute fs commands. " +
"The full syntax is: \n\n" +
"hadoop fs [-fs <local | file system URI>] [-conf <configuration file>]\n\t" +
- "[-D <property=value>] [-ls <path>] [-lsr <path>] [-df [<path>]] [-du <path>]\n\t" +
+ "[-D <property=value>] [-ls <path>] [-lsr <path>] [-df [<path>]] [-du [-s] [-h] <path>]\n\t" +
"[-dus <path>] [-mv <src> <dst>] [-cp <src> <dst>] [-rm [-skipTrash] <src>]\n\t" +
"[-rmr [-skipTrash] <src>] [-put <localsrc> ... <dst>] [-copyFromLocal <localsrc> ... <dst>]\n\t" +
"[-moveFromLocal <localsrc> ... <dst>] [" +
@@ -1389,17 +1389,20 @@ private void printHelp(String cmd) {
"\t\tIf the filesystem has multiple partitions, and no path to a particular partition\n"+
"\t\tis specified, then the status of the root partitions will be shown.\n";
- String du = "-du <path>: \tShow the amount of space, in bytes, used by the files that \n" +
- "\t\tmatch the specified file pattern. Equivalent to the unix\n" +
- "\t\tcommand \"du -sb <path>/*\" in case of a directory, \n" +
- "\t\tand to \"du -b <path>\" in case of a file.\n" +
+ String du = "-du [-s] [-h] <path>: \tShow the amount of space, in bytes, used by the files that \n" +
+ "\t\tmatch the specified file pattern. The following flags are optional:\n" +
+ "\t\t -s Rather than showing the size of each individual file that\n" +
+ "\t\t matches the pattern, shows the total (summary) size.\n" +
+ "\t\t -h Formats the sizes of files in a human-readable fashion\n" +
+ "\t\t rather than a number of bytes.\n" +
+ "\n" +
+ "\t\tNote that, even without the -s option, this only shows size summaries\n" +
+ "\t\tone level deep into a directory.\n" +
"\t\tThe output is in the form \n" +
- "\t\t\tname(full path) size (in bytes)\n";
+ "\t\t\tsize\tname(full path)\n";
String dus = "-dus <path>: \tShow the amount of space, in bytes, used by the files that \n" +
- "\t\tmatch the specified file pattern. Equivalent to the unix\n" +
- "\t\tcommand \"du -sb\" The output is in the form \n" +
- "\t\t\tname(full path) size (in bytes)\n";
+ "\t\tmatch the specified file pattern. This is equivalent to -du -s above.\n";
String mv = "-mv <src> <dst>: Move files that match the specified file pattern <src>\n" +
"\t\tto a destination <dst>. When moving multiple files, the \n" +
@@ -1744,7 +1747,7 @@ private static void printUsage(String cmd) {
System.err.println(" [-ls <path>]");
System.err.println(" [-lsr <path>]");
System.err.println(" [-df [<path>]]");
- System.err.println(" [-du <path>]");
+ System.err.println(" [-du [-s] [-h] <path>]");
System.err.println(" [-dus <path>]");
System.err.println(" [" + Count.USAGE + "]");
System.err.println(" [-mv <src> <dst>]");
|
eb9af4d3708a91c6442966dc38ee1d6139d361c7
|
ReactiveX-RxJava
|
Fixed a bug in the test scheduler that happened- when advancing time by a too little amount--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/concurrency/TestScheduler.java b/rxjava-core/src/main/java/rx/concurrency/TestScheduler.java
index c785031df2..75ba1e665a 100644
--- a/rxjava-core/src/main/java/rx/concurrency/TestScheduler.java
+++ b/rxjava-core/src/main/java/rx/concurrency/TestScheduler.java
@@ -79,6 +79,7 @@ private void triggerActions(long targetTimeInNanos) {
while (!queue.isEmpty()) {
TimedAction<?> current = queue.peek();
if (current.time > targetTimeInNanos) {
+ time = targetTimeInNanos;
break;
}
time = current.time;
|
2c95fae7ad7c87c96fad2e4c80c3580ea3b06cf3
|
orientdb
|
Implemented issue 109:- http://code.google.com/p/orient/issues/detail?id=109 About in-memory clusters- inside regular persistent databases.--
|
a
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/config/OStorageConfiguration.java b/core/src/main/java/com/orientechnologies/orient/core/config/OStorageConfiguration.java
index 8a282499a36..40b08aa627e 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/config/OStorageConfiguration.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/config/OStorageConfiguration.java
@@ -30,288 +30,298 @@
import com.orientechnologies.orient.core.storage.OStorage;
public class OStorageConfiguration implements OSerializableStream {
- public static final int CONFIG_RECORD_NUM = 0;
-
- public int version = 0;
- public String name;
- public String schemaRecordId;
- public String dictionaryRecordId;
-
- public String localeLanguage = Locale.getDefault().getLanguage();
- public String localeCountry = Locale.getDefault().getCountry();
- public String dateFormat = "yyyy-MM-dd";
- public String dateTimeFormat = "yyyy-MM-dd hh:mm:ss";
-
- public List<OStorageClusterConfiguration> clusters = new ArrayList<OStorageClusterConfiguration>();
- public List<OStorageDataConfiguration> dataSegments = new ArrayList<OStorageDataConfiguration>();
-
- public OStorageTxConfiguration txSegment = new OStorageTxConfiguration();
-
- public List<OEntryConfiguration> properties = new ArrayList<OEntryConfiguration>();
-
- private transient Locale localeInstance;
- private transient DateFormat dateFormatInstance;
- private transient DateFormat dateTimeFormatInstance;
- private transient DecimalFormatSymbols unusualSymbols;
- private transient OStorage storage;
- private transient byte[] record;
-
- private static final int FIXED_CONFIG_SIZE = 20000;
-
- public OStorageConfiguration load() throws IOException {
- record = storage.readRecord(null, -1, storage.getClusterIdByName(OStorage.CLUSTER_INTERNAL_NAME), CONFIG_RECORD_NUM, null).buffer;
- fromStream(record);
- return this;
- }
-
- public void update() throws IOException {
- if (record == null)
- return;
-
- record = toStream();
- storage.updateRecord(-1, storage.getClusterIdByName(OStorage.CLUSTER_INTERNAL_NAME), 0, record, -1, ORecordBytes.RECORD_TYPE);
- }
-
- public void create() throws IOException {
- record = toStream();
- storage.createRecord(storage.getClusterIdByName(OStorage.CLUSTER_INTERNAL_NAME), record, ORecordBytes.RECORD_TYPE);
- }
-
- public OStorageConfiguration(final OStorage iStorage) {
- storage = iStorage;
- }
-
- public boolean isEmpty() {
- return clusters.isEmpty();
- }
-
- public Locale getLocaleInstance() {
- if (localeInstance == null)
- localeInstance = new Locale(localeLanguage, localeCountry);
-
- return localeInstance;
- }
-
- public DateFormat getDateFormatInstance() {
- if (dateFormatInstance == null) {
- dateFormatInstance = new SimpleDateFormat(dateFormat);
- dateFormatInstance.setLenient(false);
- }
- return dateFormatInstance;
- }
-
- public DateFormat getDateTimeFormatInstance() {
- if (dateTimeFormatInstance == null) {
- dateTimeFormatInstance = new SimpleDateFormat(dateTimeFormat);
- dateTimeFormatInstance.setLenient(false);
- }
- return dateTimeFormatInstance;
- }
-
- public DecimalFormatSymbols getUnusualSymbols() {
- if (unusualSymbols == null)
- unusualSymbols = new DecimalFormatSymbols(getLocaleInstance());
- return unusualSymbols;
- }
-
- public OSerializableStream fromStream(byte[] iStream) throws IOException {
- String[] values = new String(iStream).split("\\|");
- int index = 0;
- version = Integer.parseInt(read(values[index++]));
- name = read(values[index++]);
-
- schemaRecordId = read(values[index++]);
- dictionaryRecordId = read(values[index++]);
-
- localeLanguage = read(values[index++]);
- localeCountry = read(values[index++]);
- dateFormat = read(values[index++]);
- dateTimeFormat = read(values[index++]);
-
- int size = Integer.parseInt(read(values[index++]));
- String clusterType;
- int clusterId;
- String clusterName;
-
- // PREPARE THE LIST OF CLUSTERS
- clusters = new ArrayList<OStorageClusterConfiguration>(size);
- for (int i = 0; i < size; ++i)
- clusters.add(null);
-
- OStoragePhysicalClusterConfiguration phyCluster;
- OStorageLogicalClusterConfiguration logCluster;
-
- for (int i = 0; i < size; ++i) {
- clusterId = Integer.parseInt(read(values[index++]));
- clusterName = read(values[index++]);
-
- clusterType = read(values[index++]);
-
- // PHYSICAL CLUSTER
- if (clusterType.equals("p")) {
- phyCluster = new OStoragePhysicalClusterConfiguration(this, clusterId);
- phyCluster.name = clusterName;
- index = phySegmentFromStream(values, index, phyCluster);
- phyCluster.holeFile = new OStorageClusterHoleConfiguration(phyCluster, read(values[index++]), read(values[index++]),
- read(values[index++]));
- clusters.set(clusterId, phyCluster);
- } else {
- // LOGICAL CLUSTER
- logCluster = new OStorageLogicalClusterConfiguration(clusterName, clusterId, Integer.parseInt(read(values[index++])),
- new ORecordId(values[index++]));
- clusters.set(clusterId, logCluster);
- }
- }
-
- // PREPARE THE LIST OF DATA SEGS
- size = Integer.parseInt(read(values[index++]));
- dataSegments = new ArrayList<OStorageDataConfiguration>(size);
- for (int i = 0; i < size; ++i)
- dataSegments.add(null);
-
- int dataId;
- String dataName;
- OStorageDataConfiguration data;
- for (int i = 0; i < size; ++i) {
- dataId = Integer.parseInt(read(values[index++]));
- dataName = read(values[index++]);
-
- data = new OStorageDataConfiguration(this, dataName);
- index = phySegmentFromStream(values, index, data);
- data.holeFile = new OStorageDataHoleConfiguration(data, read(values[index++]), read(values[index++]), read(values[index++]));
- dataSegments.set(dataId, data);
- }
-
- txSegment = new OStorageTxConfiguration(read(values[index++]), read(values[index++]), read(values[index++]),
- read(values[index++]), read(values[index++]));
-
- size = Integer.parseInt(read(values[index++]));
- properties = new ArrayList<OEntryConfiguration>(size);
- for (int i = 0; i < size; ++i) {
- properties.add(new OEntryConfiguration(read(values[index++]), read(values[index++])));
- }
-
- return this;
- }
-
- public byte[] toStream() throws IOException {
- StringBuilder buffer = new StringBuilder();
-
- write(buffer, version);
- write(buffer, name);
-
- write(buffer, schemaRecordId);
- write(buffer, dictionaryRecordId);
-
- write(buffer, localeLanguage);
- write(buffer, localeCountry);
- write(buffer, dateFormat);
- write(buffer, dateTimeFormat);
-
- write(buffer, clusters.size());
- for (OStorageClusterConfiguration c : clusters) {
- if (c == null)
- continue;
-
- write(buffer, c.getId());
- write(buffer, c.getName());
-
- if (c instanceof OStoragePhysicalClusterConfiguration) {
- write(buffer, "p");
- phySegmentToStream(buffer, (OStoragePhysicalClusterConfiguration) c);
- fileToStream(buffer, ((OStoragePhysicalClusterConfiguration) c).holeFile);
- } else {
- write(buffer, "l");
- logSegmentToStream(buffer, (OStorageLogicalClusterConfiguration) c);
- }
- }
-
- write(buffer, dataSegments.size());
- for (OStorageDataConfiguration d : dataSegments) {
- if (d == null)
- continue;
-
- write(buffer, d.id);
- write(buffer, d.name);
-
- phySegmentToStream(buffer, d);
- fileToStream(buffer, d.holeFile);
- }
-
- fileToStream(buffer, txSegment);
- write(buffer, txSegment.isSynchRecord());
- write(buffer, txSegment.isSynchTx());
-
- write(buffer, properties.size());
- for (OEntryConfiguration e : properties)
- entryToStream(buffer, e);
-
- if (buffer.length() > FIXED_CONFIG_SIZE)
- throw new OConfigurationException("Configuration data exceeded size limit: " + FIXED_CONFIG_SIZE + " bytes");
-
- // ALLOCATE ENOUGHT SPACE TO REUSE IT EVERY TIME
- buffer.append("|");
- buffer.setLength(FIXED_CONFIG_SIZE);
-
- return buffer.toString().getBytes();
- }
-
- private int phySegmentFromStream(final String[] values, int index, final OStorageSegmentConfiguration iSegment) {
- iSegment.maxSize = read(values[index++]);
- iSegment.fileType = read(values[index++]);
- iSegment.fileStartSize = read(values[index++]);
- iSegment.fileMaxSize = read(values[index++]);
- iSegment.fileIncrementSize = read(values[index++]);
- iSegment.defrag = read(values[index++]);
-
- final int size = Integer.parseInt(read(values[index++]));
- iSegment.infoFiles = new OStorageFileConfiguration[size];
- for (int i = 0; i < size; ++i) {
- iSegment.infoFiles[i] = new OStorageFileConfiguration(iSegment, read(values[index++]), read(values[index++]),
- read(values[index++]), iSegment.fileIncrementSize);
- }
-
- return index;
- }
-
- private void phySegmentToStream(final StringBuilder iBuffer, final OStorageSegmentConfiguration iSegment) {
- write(iBuffer, iSegment.maxSize);
- write(iBuffer, iSegment.fileType);
- write(iBuffer, iSegment.fileStartSize);
- write(iBuffer, iSegment.fileMaxSize);
- write(iBuffer, iSegment.fileIncrementSize);
- write(iBuffer, iSegment.defrag);
-
- write(iBuffer, iSegment.infoFiles.length);
- for (OStorageFileConfiguration f : iSegment.infoFiles)
- fileToStream(iBuffer, f);
- }
-
- private void logSegmentToStream(final StringBuilder iBuffer, final OStorageLogicalClusterConfiguration iSegment) {
- write(iBuffer, iSegment.physicalClusterId);
- write(iBuffer, iSegment.map.toString());
- }
-
- private void fileToStream(final StringBuilder iBuffer, final OStorageFileConfiguration iFile) {
- write(iBuffer, iFile.path);
- write(iBuffer, iFile.type);
- write(iBuffer, iFile.maxSize);
- }
-
- private void entryToStream(final StringBuilder iBuffer, final OEntryConfiguration iEntry) {
- write(iBuffer, iEntry.name);
- write(iBuffer, iEntry.value);
- }
-
- private String read(final String iValue) {
- if (iValue.equals(" "))
- return null;
- return iValue;
- }
-
- private void write(final StringBuilder iBuffer, final Object iValue) {
- if (iBuffer.length() > 0)
- iBuffer.append("|");
- iBuffer.append(iValue != null ? iValue.toString() : " ");
- }
+ public static final int CONFIG_RECORD_NUM = 0;
+
+ public int version = 0;
+ public String name;
+ public String schemaRecordId;
+ public String dictionaryRecordId;
+
+ public String localeLanguage = Locale.getDefault().getLanguage();
+ public String localeCountry = Locale.getDefault().getCountry();
+ public String dateFormat = "yyyy-MM-dd";
+ public String dateTimeFormat = "yyyy-MM-dd hh:mm:ss";
+
+ public List<OStorageClusterConfiguration> clusters = new ArrayList<OStorageClusterConfiguration>();
+ public List<OStorageDataConfiguration> dataSegments = new ArrayList<OStorageDataConfiguration>();
+
+ public OStorageTxConfiguration txSegment = new OStorageTxConfiguration();
+
+ public List<OEntryConfiguration> properties = new ArrayList<OEntryConfiguration>();
+
+ private transient Locale localeInstance;
+ private transient DateFormat dateFormatInstance;
+ private transient DateFormat dateTimeFormatInstance;
+ private transient DecimalFormatSymbols unusualSymbols;
+ private transient OStorage storage;
+ private transient byte[] record;
+
+ private static final int FIXED_CONFIG_SIZE = 20000;
+
+ public OStorageConfiguration load() throws IOException {
+ record = storage.readRecord(null, -1, storage.getClusterIdByName(OStorage.CLUSTER_INTERNAL_NAME), CONFIG_RECORD_NUM, null).buffer;
+ fromStream(record);
+ return this;
+ }
+
+ public void update() throws IOException {
+ if (record == null)
+ return;
+
+ record = toStream();
+ storage.updateRecord(-1, storage.getClusterIdByName(OStorage.CLUSTER_INTERNAL_NAME), 0, record, -1, ORecordBytes.RECORD_TYPE);
+ }
+
+ public void create() throws IOException {
+ record = toStream();
+ storage.createRecord(storage.getClusterIdByName(OStorage.CLUSTER_INTERNAL_NAME), record, ORecordBytes.RECORD_TYPE);
+ }
+
+ public OStorageConfiguration(final OStorage iStorage) {
+ storage = iStorage;
+ }
+
+ public boolean isEmpty() {
+ return clusters.isEmpty();
+ }
+
+ public Locale getLocaleInstance() {
+ if (localeInstance == null)
+ localeInstance = new Locale(localeLanguage, localeCountry);
+
+ return localeInstance;
+ }
+
+ public DateFormat getDateFormatInstance() {
+ if (dateFormatInstance == null) {
+ dateFormatInstance = new SimpleDateFormat(dateFormat);
+ dateFormatInstance.setLenient(false);
+ }
+ return dateFormatInstance;
+ }
+
+ public DateFormat getDateTimeFormatInstance() {
+ if (dateTimeFormatInstance == null) {
+ dateTimeFormatInstance = new SimpleDateFormat(dateTimeFormat);
+ dateTimeFormatInstance.setLenient(false);
+ }
+ return dateTimeFormatInstance;
+ }
+
+ public DecimalFormatSymbols getUnusualSymbols() {
+ if (unusualSymbols == null)
+ unusualSymbols = new DecimalFormatSymbols(getLocaleInstance());
+ return unusualSymbols;
+ }
+
+ public OSerializableStream fromStream(byte[] iStream) throws IOException {
+ String[] values = new String(iStream).split("\\|");
+ int index = 0;
+ version = Integer.parseInt(read(values[index++]));
+ name = read(values[index++]);
+
+ schemaRecordId = read(values[index++]);
+ dictionaryRecordId = read(values[index++]);
+
+ localeLanguage = read(values[index++]);
+ localeCountry = read(values[index++]);
+ dateFormat = read(values[index++]);
+ dateTimeFormat = read(values[index++]);
+
+ int size = Integer.parseInt(read(values[index++]));
+ String clusterType;
+ int clusterId;
+ String clusterName;
+
+ // PREPARE THE LIST OF CLUSTERS
+ clusters = new ArrayList<OStorageClusterConfiguration>(size);
+ for (int i = 0; i < size; ++i)
+ clusters.add(null);
+
+ OStoragePhysicalClusterConfiguration phyCluster;
+ OStorageLogicalClusterConfiguration logCluster;
+ OStorageMemoryClusterConfiguration memCluster;
+
+ for (int i = 0; i < size; ++i) {
+ clusterId = Integer.parseInt(read(values[index++]));
+ clusterName = read(values[index++]);
+
+ clusterType = read(values[index++]);
+
+ // PHYSICAL CLUSTER
+ if (clusterType.equals("p")) {
+ phyCluster = new OStoragePhysicalClusterConfiguration(this, clusterId);
+ phyCluster.name = clusterName;
+ index = phySegmentFromStream(values, index, phyCluster);
+ phyCluster.holeFile = new OStorageClusterHoleConfiguration(phyCluster, read(values[index++]), read(values[index++]),
+ read(values[index++]));
+ clusters.set(clusterId, phyCluster);
+ } else if (clusterType.equals("l")) {
+ // LOGICAL CLUSTER
+ logCluster = new OStorageLogicalClusterConfiguration(clusterName, clusterId, Integer.parseInt(read(values[index++])),
+ new ORecordId(values[index++]));
+ clusters.set(clusterId, logCluster);
+ } else {
+ // MEMORY CLUSTER
+ memCluster = new OStorageMemoryClusterConfiguration(clusterName, clusterId);
+ clusters.set(clusterId, memCluster);
+ }
+ }
+
+ // PREPARE THE LIST OF DATA SEGS
+ size = Integer.parseInt(read(values[index++]));
+ dataSegments = new ArrayList<OStorageDataConfiguration>(size);
+ for (int i = 0; i < size; ++i)
+ dataSegments.add(null);
+
+ int dataId;
+ String dataName;
+ OStorageDataConfiguration data;
+ for (int i = 0; i < size; ++i) {
+ dataId = Integer.parseInt(read(values[index++]));
+ dataName = read(values[index++]);
+
+ data = new OStorageDataConfiguration(this, dataName);
+ index = phySegmentFromStream(values, index, data);
+ data.holeFile = new OStorageDataHoleConfiguration(data, read(values[index++]), read(values[index++]), read(values[index++]));
+ dataSegments.set(dataId, data);
+ }
+
+ txSegment = new OStorageTxConfiguration(read(values[index++]), read(values[index++]), read(values[index++]),
+ read(values[index++]), read(values[index++]));
+
+ size = Integer.parseInt(read(values[index++]));
+ properties = new ArrayList<OEntryConfiguration>(size);
+ for (int i = 0; i < size; ++i) {
+ properties.add(new OEntryConfiguration(read(values[index++]), read(values[index++])));
+ }
+
+ return this;
+ }
+
+ public byte[] toStream() throws IOException {
+ StringBuilder buffer = new StringBuilder();
+
+ write(buffer, version);
+ write(buffer, name);
+
+ write(buffer, schemaRecordId);
+ write(buffer, dictionaryRecordId);
+
+ write(buffer, localeLanguage);
+ write(buffer, localeCountry);
+ write(buffer, dateFormat);
+ write(buffer, dateTimeFormat);
+
+ write(buffer, clusters.size());
+ for (OStorageClusterConfiguration c : clusters) {
+ if (c == null)
+ continue;
+
+ write(buffer, c.getId());
+ write(buffer, c.getName());
+
+ if (c instanceof OStoragePhysicalClusterConfiguration) {
+ // PHYSICAL
+ write(buffer, "p");
+ phySegmentToStream(buffer, (OStoragePhysicalClusterConfiguration) c);
+ fileToStream(buffer, ((OStoragePhysicalClusterConfiguration) c).holeFile);
+ } else if (c instanceof OStorageLogicalClusterConfiguration) {
+ // LOGICAL
+ write(buffer, "l");
+ logSegmentToStream(buffer, (OStorageLogicalClusterConfiguration) c);
+ } else {
+ // MEMORY
+ write(buffer, "m");
+ }
+ }
+
+ write(buffer, dataSegments.size());
+ for (OStorageDataConfiguration d : dataSegments) {
+ if (d == null)
+ continue;
+
+ write(buffer, d.id);
+ write(buffer, d.name);
+
+ phySegmentToStream(buffer, d);
+ fileToStream(buffer, d.holeFile);
+ }
+
+ fileToStream(buffer, txSegment);
+ write(buffer, txSegment.isSynchRecord());
+ write(buffer, txSegment.isSynchTx());
+
+ write(buffer, properties.size());
+ for (OEntryConfiguration e : properties)
+ entryToStream(buffer, e);
+
+ if (buffer.length() > FIXED_CONFIG_SIZE)
+ throw new OConfigurationException("Configuration data exceeded size limit: " + FIXED_CONFIG_SIZE + " bytes");
+
+ // ALLOCATE ENOUGHT SPACE TO REUSE IT EVERY TIME
+ buffer.append("|");
+ buffer.setLength(FIXED_CONFIG_SIZE);
+
+ return buffer.toString().getBytes();
+ }
+
+ private int phySegmentFromStream(final String[] values, int index, final OStorageSegmentConfiguration iSegment) {
+ iSegment.maxSize = read(values[index++]);
+ iSegment.fileType = read(values[index++]);
+ iSegment.fileStartSize = read(values[index++]);
+ iSegment.fileMaxSize = read(values[index++]);
+ iSegment.fileIncrementSize = read(values[index++]);
+ iSegment.defrag = read(values[index++]);
+
+ final int size = Integer.parseInt(read(values[index++]));
+ iSegment.infoFiles = new OStorageFileConfiguration[size];
+ for (int i = 0; i < size; ++i) {
+ iSegment.infoFiles[i] = new OStorageFileConfiguration(iSegment, read(values[index++]), read(values[index++]),
+ read(values[index++]), iSegment.fileIncrementSize);
+ }
+
+ return index;
+ }
+
+ private void phySegmentToStream(final StringBuilder iBuffer, final OStorageSegmentConfiguration iSegment) {
+ write(iBuffer, iSegment.maxSize);
+ write(iBuffer, iSegment.fileType);
+ write(iBuffer, iSegment.fileStartSize);
+ write(iBuffer, iSegment.fileMaxSize);
+ write(iBuffer, iSegment.fileIncrementSize);
+ write(iBuffer, iSegment.defrag);
+
+ write(iBuffer, iSegment.infoFiles.length);
+ for (OStorageFileConfiguration f : iSegment.infoFiles)
+ fileToStream(iBuffer, f);
+ }
+
+ private void logSegmentToStream(final StringBuilder iBuffer, final OStorageLogicalClusterConfiguration iSegment) {
+ write(iBuffer, iSegment.physicalClusterId);
+ write(iBuffer, iSegment.map.toString());
+ }
+
+ private void fileToStream(final StringBuilder iBuffer, final OStorageFileConfiguration iFile) {
+ write(iBuffer, iFile.path);
+ write(iBuffer, iFile.type);
+ write(iBuffer, iFile.maxSize);
+ }
+
+ private void entryToStream(final StringBuilder iBuffer, final OEntryConfiguration iEntry) {
+ write(iBuffer, iEntry.name);
+ write(iBuffer, iEntry.value);
+ }
+
+ private String read(final String iValue) {
+ if (iValue.equals(" "))
+ return null;
+ return iValue;
+ }
+
+ private void write(final StringBuilder iBuffer, final Object iValue) {
+ if (iBuffer.length() > 0)
+ iBuffer.append("|");
+ iBuffer.append(iValue != null ? iValue.toString() : " ");
+ }
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/config/OStorageMemoryClusterConfiguration.java b/core/src/main/java/com/orientechnologies/orient/core/config/OStorageMemoryClusterConfiguration.java
new file mode 100644
index 00000000000..a2b8060568c
--- /dev/null
+++ b/core/src/main/java/com/orientechnologies/orient/core/config/OStorageMemoryClusterConfiguration.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.orientechnologies.orient.core.config;
+
+
+public class OStorageMemoryClusterConfiguration implements OStorageClusterConfiguration {
+ public String name;
+ public int id;
+
+ public OStorageMemoryClusterConfiguration(final String name, final int id) {
+ this.name = name;
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(final int iId) {
+ id = iId;
+ }
+}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
index 3db5f7a5c73..226b5c61978 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OStorageLocal.java
@@ -35,6 +35,7 @@
import com.orientechnologies.orient.core.config.OStorageConfiguration;
import com.orientechnologies.orient.core.config.OStorageDataConfiguration;
import com.orientechnologies.orient.core.config.OStorageLogicalClusterConfiguration;
+import com.orientechnologies.orient.core.config.OStorageMemoryClusterConfiguration;
import com.orientechnologies.orient.core.config.OStoragePhysicalClusterConfiguration;
import com.orientechnologies.orient.core.config.OStorageSegmentConfiguration;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
@@ -56,6 +57,7 @@
import com.orientechnologies.orient.core.storage.ORecordBrowsingListener;
import com.orientechnologies.orient.core.storage.OStorage;
import com.orientechnologies.orient.core.storage.OStorageAbstract;
+import com.orientechnologies.orient.core.storage.impl.memory.OClusterMemory;
import com.orientechnologies.orient.core.tx.OTransaction;
public class OStorageLocal extends OStorageAbstract {
@@ -338,6 +340,8 @@ public int addCluster(String iClusterName, final String iClusterType, final Obje
: (Integer) iParameters[0]);
return addLogicalCluster(iClusterName, physicalClusterId);
+ } else if (OClusterMemory.TYPE.equalsIgnoreCase(iClusterType)) {
+ return addMemoryCluster(iClusterName);
} else
OLogManager.instance().exception(
"Cluster type '" + iClusterType + "' is not supported. Supported types are: " + Arrays.toString(TYPES), null,
@@ -1067,6 +1071,18 @@ private int addLogicalCluster(final String iClusterName, final int iPhysicalClus
return id;
}
+ private int addMemoryCluster(final String iClusterName) throws IOException {
+ final OStorageMemoryClusterConfiguration config = new OStorageMemoryClusterConfiguration(iClusterName, clusters.length);
+
+ configuration.clusters.add(config);
+
+ final OClusterMemory cluster = new OClusterMemory(clusters.length, iClusterName);
+ final int id = registerCluster(cluster);
+ configuration.update();
+
+ return id;
+ }
+
public ODataLocal[] getDataSegments() {
return dataSegments;
}
|
67b40f7142ea54d9d8234fd30ae82c85b3c7176c
|
camel
|
CAMEL-2470: Adding test for sending back a reply- to JMSReplyTo based on a temporary queue.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@949117 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java
index 11725b01dccb0..59713a088527d 100644
--- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java
+++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java
@@ -120,11 +120,9 @@ public static JmsEndpoint newInstance(Destination destination, JmsComponent comp
public static JmsEndpoint newInstance(Destination destination) throws JMSException {
if (destination instanceof TemporaryQueue) {
return new JmsTemporaryQueueEndpoint((TemporaryQueue) destination);
- }
- if (destination instanceof TemporaryTopic) {
+ } else if (destination instanceof TemporaryTopic) {
return new JmsTemporaryTopicEndpoint((TemporaryTopic) destination);
- }
- if (destination instanceof Queue) {
+ } else if (destination instanceof Queue) {
return new JmsQueueEndpoint((Queue) destination);
} else {
return new JmsEndpoint((Topic) destination);
diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsTemporaryQueueEndpoint.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsTemporaryQueueEndpoint.java
index b30c2c32fdf2d..f7cd4034d1188 100644
--- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsTemporaryQueueEndpoint.java
+++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsTemporaryQueueEndpoint.java
@@ -24,10 +24,11 @@
/**
* A <a href="http://activemq.apache.org/jms.html">JMS Endpoint</a>
* for working with a {@link TemporaryQueue}
+ * <p/>
+ * <b>Important:</b> Need to be really careful to always use the same Connection otherwise the destination goes stale
*
* @version $Revision$
*/
-// TODO need to be really careful to always use the same Connection otherwise the destination goes stale
public class JmsTemporaryQueueEndpoint extends JmsQueueEndpoint implements DestinationEndpoint {
private Destination jmsDestination;
@@ -61,11 +62,10 @@ public boolean isSingleton() {
}
@Override
- // We don't want to manage this temporary object
public Object getManagedObject(JmsEndpoint object) {
+ // We don't want to manage this temporary object, so return null
return null;
}
-
public synchronized Destination getJmsDestination(Session session) throws JMSException {
if (jmsDestination == null) {
diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/TempReplyToIssueTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/TempReplyToIssueTest.java
new file mode 100644
index 0000000000000..f77da977b1c43
--- /dev/null
+++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/TempReplyToIssueTest.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.jms.issues;
+
+import javax.jms.ConnectionFactory;
+import javax.jms.Destination;
+
+import org.apache.activemq.ActiveMQConnectionFactory;
+import org.apache.camel.Body;
+import org.apache.camel.CamelContext;
+import org.apache.camel.Exchange;
+import org.apache.camel.Header;
+import org.apache.camel.Processor;
+import org.apache.camel.ProducerTemplate;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.component.jms.JmsConstants;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.junit.Test;
+
+import static org.apache.camel.component.jms.JmsComponent.jmsComponentClientAcknowledge;
+
+/**
+ * @version $Revision$
+ */
+public class TempReplyToIssueTest extends CamelTestSupport {
+
+ @Test
+ public void testReplyToIssue() throws Exception {
+ String out = template.requestBody("activemq:queue:test.queue", "World", String.class);
+ // we should receive that fixed reply
+ assertEquals("Hello Moon", out);
+ }
+
+ public String handleMessage(final @Header("JMSReplyTo") Destination jmsReplyTo, final @Header("JMSCorrelationID") String id,
+ @Body String body, Exchange exchange) throws Exception {
+ assertNotNull(jmsReplyTo);
+ assertTrue("Should be a temp queue", jmsReplyTo.toString().startsWith("temp-queue"));
+
+ // we send the reply manually (notice we just use a bogus endpoint uri)
+ ProducerTemplate producer = exchange.getContext().createProducerTemplate();
+ producer.send("activemq:queue:xxx", new Processor() {
+ public void process(Exchange exchange) throws Exception {
+ exchange.getIn().setBody("Hello Moon");
+ // remember to set correlation id
+ exchange.getIn().setHeader("JMSCorrelationID", id);
+ // this is the real destination we send the reply to
+ exchange.getIn().setHeader(JmsConstants.JMS_DESTINATION, jmsReplyTo);
+ }
+ });
+ // stop it after use
+ producer.stop();
+
+ // sleep a bit so Camel will send the reply a bit later
+ Thread.sleep(1000);
+
+ // this will later cause a problem as the temp queue has been deleted
+ // and exceptions will be logged etc
+ return "Hello " + body;
+ }
+
+ protected CamelContext createCamelContext() throws Exception {
+ CamelContext camelContext = super.createCamelContext();
+ ConnectionFactory connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");
+ camelContext.addComponent("activemq", jmsComponentClientAcknowledge(connectionFactory));
+ return camelContext;
+ }
+
+ @Override
+ protected RouteBuilder createRouteBuilder() throws Exception {
+ return new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ from("activemq:queue:test.queue").bean(TempReplyToIssueTest.class, "handleMessage");
+ }
+ };
+ }
+}
|
d07bf38b5f4f65bb7e38e6afae4d41ec304d97e6
|
restlet-framework-java
|
- Fixed issue with HTTP client connector not- correctly reporting connection and other IO errors.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/source/main/com/noelios/restlet/impl/HttpClientCallImpl.java b/source/main/com/noelios/restlet/impl/HttpClientCallImpl.java
index bbacf77003..f2a0574c81 100644
--- a/source/main/com/noelios/restlet/impl/HttpClientCallImpl.java
+++ b/source/main/com/noelios/restlet/impl/HttpClientCallImpl.java
@@ -27,7 +27,6 @@
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.InetAddress;
-import java.net.ProtocolException;
import java.net.URL;
import java.net.UnknownHostException;
import java.nio.channels.ReadableByteChannel;
@@ -35,8 +34,6 @@
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
import javax.net.ssl.HttpsURLConnection;
@@ -49,9 +46,6 @@
*/
public class HttpClientCallImpl extends ConnectorCallImpl implements ClientCall
{
- /** Obtain a suitable logger. */
- private static Logger logger = Logger.getLogger("com.noelios.restlet.connector.HttpClientCallImpl");
-
/** The wrapped HTTP URL connection. */
protected HttpURLConnection connection;
@@ -114,17 +108,10 @@ public void setRequestMethod(String method)
* Sends the request headers.<br/>
* Must be called before sending the request input.
*/
- public void sendRequestHeaders()
+ public void sendRequestHeaders() throws IOException
{
// Set the request method
- try
- {
- getConnection().setRequestMethod(getRequestMethod());
- }
- catch(ProtocolException e)
- {
- logger.log(Level.WARNING, "Unable to set method", e);
- }
+ getConnection().setRequestMethod(getRequestMethod());
// Set the request headers
Parameter header;
@@ -135,14 +122,7 @@ public void sendRequestHeaders()
}
// Ensure that the connections is active
- try
- {
- getConnection().connect();
- }
- catch(IOException ioe)
- {
- logger.log(Level.WARNING, "Unable to connect to the server", ioe);
- }
+ getConnection().connect();
}
/**
@@ -167,7 +147,7 @@ else if(getRequestChannel() != null)
* Returns the request entity channel if it exists.
* @return The request entity channel if it exists.
*/
- public WritableByteChannel getRequestChannel()
+ public WritableByteChannel getRequestChannel() throws IOException
{
return null;
}
@@ -176,17 +156,9 @@ public WritableByteChannel getRequestChannel()
* Returns the request entity stream if it exists.
* @return The request entity stream if it exists.
*/
- public OutputStream getRequestStream()
+ public OutputStream getRequestStream() throws IOException
{
- try
- {
- return getConnection().getOutputStream();
- }
- catch(IOException e)
- {
- logger.log(Level.WARNING, "Unable to get the request stream", e);
- return null;
- }
+ return getConnection().getOutputStream();
}
/**
@@ -261,7 +233,7 @@ public String getResponseReasonPhrase()
* Returns the response channel if it exists.
* @return The response channel if it exists.
*/
- public ReadableByteChannel getResponseChannel()
+ public ReadableByteChannel getResponseChannel() throws IOException
{
return null;
}
@@ -270,16 +242,8 @@ public ReadableByteChannel getResponseChannel()
* Returns the response stream if it exists.
* @return The response stream if it exists.
*/
- public InputStream getResponseStream()
+ public InputStream getResponseStream() throws IOException
{
- try
- {
- return getConnection().getInputStream();
- }
- catch(IOException e)
- {
- logger.log(Level.FINE, "Unable to get the response stream", e);
- return null;
- }
+ return getConnection().getInputStream();
}
}
diff --git a/source/main/com/noelios/restlet/impl/HttpClientImpl.java b/source/main/com/noelios/restlet/impl/HttpClientImpl.java
index 7b821a8fdc..6ec168d6e1 100644
--- a/source/main/com/noelios/restlet/impl/HttpClientImpl.java
+++ b/source/main/com/noelios/restlet/impl/HttpClientImpl.java
@@ -23,6 +23,7 @@
package com.noelios.restlet.impl;
import java.io.IOException;
+import java.net.ConnectException;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
@@ -52,6 +53,7 @@
import org.restlet.data.Protocols;
import org.restlet.data.Representation;
import org.restlet.data.DefaultStatus;
+import org.restlet.data.Statuses;
import org.restlet.data.Tag;
import com.noelios.restlet.data.ContentType;
@@ -378,9 +380,15 @@ else if(clientCall.getResponseChannel() != null)
}
}
}
+ catch(ConnectException ce)
+ {
+ logger.log(Level.FINE, "An error occured during the connection to the remote HTTP server.", ce);
+ call.setStatus(new DefaultStatus(Statuses.SERVER_ERROR_SERVICE_UNAVAILABLE, "Unable to connect to the remote server. " + ce.getMessage()));
+ }
catch(Exception e)
{
- logger.log(Level.WARNING, "An error occured during the handling of an HTTP client call.", e);
+ logger.log(Level.FINE, "An error occured during the handling of the HTTP client call.", e);
+ call.setStatus(new DefaultStatus(Statuses.SERVER_ERROR_INTERNAL, "Unable to complete the call. " + e.getMessage()));
}
}
diff --git a/source/main/com/noelios/restlet/impl/RestletMapping.java b/source/main/com/noelios/restlet/impl/RestletMapping.java
index a1328ba2db..2f509e23f7 100644
--- a/source/main/com/noelios/restlet/impl/RestletMapping.java
+++ b/source/main/com/noelios/restlet/impl/RestletMapping.java
@@ -33,38 +33,38 @@
*/
public class RestletMapping extends RestletTarget
{
- /** The path pattern. */
- Pattern pathPattern;
+ /** The URI pattern. */
+ Pattern pattern;
/**
* Constructor.
- * @param pathPattern The path pattern.
+ * @param pattern The URI pattern.
* @param target The target interface.
*/
- public RestletMapping(String pathPattern, Restlet target)
+ public RestletMapping(String pattern, Restlet target)
{
super(target);
- this.pathPattern = Pattern.compile(pathPattern, Pattern.CASE_INSENSITIVE);
+ this.pattern = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE);
}
/**
* Constructor.
- * @param pathPattern The path pattern.
+ * @param pattern The URI pattern.
* @param targetClass The target class.
*/
- public RestletMapping(String pathPattern, Class<? extends Restlet> targetClass)
+ public RestletMapping(String pattern, Class<? extends Restlet> targetClass)
{
super(targetClass);
- this.pathPattern = Pattern.compile(pathPattern, Pattern.CASE_INSENSITIVE);
+ this.pattern = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE);
}
/**
- * Returns the path pattern.
- * @return The path pattern.
+ * Returns the URI pattern.
+ * @return The URI pattern.
*/
- public Pattern getPathPattern()
+ public Pattern getPattern()
{
- return this.pathPattern;
+ return this.pattern;
}
}
|
a3856ca9457b0a4a2b0e5f2504b2b0332cbf0e51
|
hbase
|
HBASE-3017 More log pruning--M conf/log4j.properties- Make ZKW log at INFO-level-M src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java- Log message cleanup.-M src/main/java/org/apache/hadoop/hbase/master/HMaster.java- Remove excessive hostname+port qualifier on master for zk messages-M src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java- Log message cleanup-M src/main/java/org/apache/hadoop/hbase/master/ServerManager.java- Format the ServerMonitor message.-M src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java- Remove excessive hostname on zk message id; just add port-M src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAssign.java- Cleanup of messages.---git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@999057 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 796a42c32106..8340b6e9619d 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -919,6 +919,7 @@ Release 0.21.0 - Unreleased
HBASE-2988 Support alternate compression for major compactions
HBASE-2941 port HADOOP-6713 - threading scalability for RPC reads - to HBase
HBASE-2782 QOS for META table access
+ HBASE-3017 More log pruning
NEW FEATURES
HBASE-1961 HBase EC2 scripts
diff --git a/conf/log4j.properties b/conf/log4j.properties
index ec341410a9ca..62b552113834 100644
--- a/conf/log4j.properties
+++ b/conf/log4j.properties
@@ -43,6 +43,8 @@ log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}:
log4j.logger.org.apache.zookeeper=INFO
#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
log4j.logger.org.apache.hadoop.hbase=DEBUG
+# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
+log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
+log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher=INFO
#log4j.logger.org.apache.hadoop.dfs=DEBUG
# Set this class to log INFO only otherwise its OTT
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO
diff --git a/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java b/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
index 557bc0499cdd..8607267af627 100644
--- a/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
+++ b/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
@@ -253,7 +253,7 @@ private void handleRegion(RegionTransitionData data) {
RegionState regionState = regionsInTransition.get(encodedName);
switch(data.getEventType()) {
case M2ZK_REGION_OFFLINE:
- LOG.warn("What to do with this event? " + data);
+ // Nothing to do.
break;
case RS2ZK_REGION_CLOSING:
@@ -545,8 +545,8 @@ private void assign(final RegionState state) {
if (plan == null) {
LOG.debug("No previous transition plan for " +
state.getRegion().getRegionNameAsString() +
- " so generating a random one from " + serverManager.countOfRegionServers() +
- " ( " + serverManager.getOnlineServers().size() + ") available servers");
+ " so generating a random one; " + serverManager.countOfRegionServers() +
+ " (online=" + serverManager.getOnlineServers().size() + ") available servers");
plan = new RegionPlan(state.getRegion(), null,
LoadBalancer.randomAssignment(serverManager.getOnlineServersList()));
regionPlans.put(encodedName, plan);
diff --git a/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index bccdc0e502ec..f580c0b93131 100644
--- a/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -203,8 +203,7 @@ public HMaster(final Configuration conf)
"_" + System.currentTimeMillis());
}
- this.zooKeeper =
- new ZooKeeperWatcher(conf, MASTER + "-" + getMasterAddress(), this);
+ this.zooKeeper = new ZooKeeperWatcher(conf, MASTER, this);
/*
* 2. Block on becoming the active master.
diff --git a/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java b/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
index d870d443f0c7..04650edb8f27 100644
--- a/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
+++ b/src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
@@ -165,13 +165,13 @@ void splitLogAfterStartup(final Map<String, HServerInfo> onlineServers) {
}
for (FileStatus status : logFolders) {
String serverName = status.getPath().getName();
- LOG.info("Found log folder : " + serverName);
- if(onlineServers.get(serverName) == null) {
- LOG.info("Log folder doesn't belong " +
+ if (onlineServers.get(serverName) == null) {
+ LOG.info("Log folder " + status.getPath() + " doesn't belong " +
"to a known region server, splitting");
splitLog(serverName);
} else {
- LOG.info("Log folder belongs to an existing region server");
+ LOG.info("Log folder " + status.getPath() +
+ " belongs to an existing region server");
}
}
}
@@ -279,4 +279,4 @@ public void deleteFamily(HRegionInfo region, byte[] familyName)
new Path(rootdir, region.getTableDesc().getNameAsString()),
region.getEncodedName(), familyName), true);
}
-}
\ No newline at end of file
+}
diff --git a/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java b/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
index 581fd108644e..3c6b09cce29d 100644
--- a/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
+++ b/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
@@ -27,6 +27,7 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
+import java.text.DecimalFormat;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -93,6 +94,8 @@ public class ServerManager {
private final DeadServer deadservers = new DeadServer();
+ private static final DecimalFormat DF = new DecimalFormat("#.##");
+
/**
* Dumps into log current stats on dead servers and number of servers
* TODO: Make this a metric; dump metrics into log.
@@ -108,10 +111,9 @@ protected void chore() {
int numDeadServers = deadservers.size();
double averageLoad = getAverageLoad();
String deadServersList = deadservers.toString();
- LOG.info(numServers + " region servers, " + numDeadServers +
- " dead, average load " + averageLoad +
- ((deadServersList != null && deadServersList.length() > 0)?
- deadServersList: ""));
+ LOG.info("regionservers=" + numServers +
+ ", averageload=" + DF.format(averageLoad) +
+ ((numDeadServers > 0)? ("deadservers=" + deadServersList): ""));
}
}
@@ -422,7 +424,7 @@ void letRegionServersShutdown() {
LOG.info("Waiting on following regionserver(s) to go down " +
this.onlineServers.values());
try {
- this.onlineServers.wait(500);
+ this.onlineServers.wait(1000);
} catch (InterruptedException e) {
// continue
}
@@ -516,7 +518,7 @@ private HRegionInterface getServerConnection(HServerInfo info) {
HConnectionManager.getConnection(this.master.getConfiguration());
HRegionInterface hri = serverConnections.get(info.getServerName());
if (hri == null) {
- LOG.info("new connection");
+ LOG.debug("New connection to " + info.getServerName());
hri = connection.getHRegionConnection(info.getServerAddress(), false);
serverConnections.put(info.getServerName(), hri);
}
@@ -537,9 +539,10 @@ public void waitForRegionServers()
getLong("hbase.master.wait.on.regionservers.interval", 3000);
// So, number of regionservers > 0 and its been n since last check in, break,
// else just stall here
+ int count = 0;
for (int oldcount = countOfRegionServers(); !this.master.isStopped();) {
Thread.sleep(interval);
- int count = countOfRegionServers();
+ count = countOfRegionServers();
if (count == oldcount && count > 0) break;
if (count == 0) {
LOG.info("Waiting on regionserver(s) to checkin");
@@ -548,6 +551,8 @@ public void waitForRegionServers()
}
oldcount = count;
}
+ LOG.info("Exiting wait on regionserver(s) to checkin; count=" + count +
+ ", stopped=" + this.master.isStopped());
}
public List<HServerInfo> getOnlineServersList() {
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index f2e4e7cbae3a..afbc77b34f78 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -431,8 +431,8 @@ private void initialize() throws IOException, InterruptedException {
private void initializeZooKeeper() throws IOException, InterruptedException {
// open connection to zookeeper and set primary watcher
- zooKeeper = new ZooKeeperWatcher(conf, REGIONSERVER + "-"
- + serverInfo.getServerName(), this);
+ zooKeeper = new ZooKeeperWatcher(conf, REGIONSERVER + ":" +
+ serverInfo.getServerAddress().getPort(), this);
this.clusterStatusTracker = new ClusterStatusTracker(this.zooKeeper, this);
this.clusterStatusTracker.start();
diff --git a/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAssign.java b/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAssign.java
index a26ff65eefc9..04a30cf831b8 100644
--- a/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAssign.java
+++ b/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAssign.java
@@ -135,8 +135,8 @@ public static void createNodeOffline(ZooKeeperWatcher zkw, HRegionInfo region,
public static void createNodeOffline(ZooKeeperWatcher zkw, HRegionInfo region,
String serverName, final EventType event)
throws KeeperException, KeeperException.NodeExistsException {
- LOG.debug(zkw.prefix("Creating an unassigned node for " +
- region.getEncodedName() + " in an OFFLINE state"));
+ LOG.debug(zkw.prefix("Creating unassigned node for " +
+ region.getEncodedName() + " in OFFLINE state"));
RegionTransitionData data = new RegionTransitionData(event,
region.getRegionName(), serverName);
synchronized(zkw.getNodes()) {
@@ -167,8 +167,8 @@ public static void createNodeOffline(ZooKeeperWatcher zkw, HRegionInfo region,
public static void forceNodeOffline(ZooKeeperWatcher zkw, HRegionInfo region,
String serverName)
throws KeeperException, KeeperException.NoNodeException {
- LOG.debug(zkw.prefix("Forcing an existing unassigned node for " +
- region.getEncodedName() + " to an OFFLINE state"));
+ LOG.debug(zkw.prefix("Forcing existing unassigned node for " +
+ region.getEncodedName() + " to OFFLINE state"));
RegionTransitionData data = new RegionTransitionData(
EventType.M2ZK_REGION_OFFLINE, region.getRegionName(), serverName);
synchronized(zkw.getNodes()) {
@@ -200,8 +200,8 @@ public static void forceNodeOffline(ZooKeeperWatcher zkw, HRegionInfo region,
public static boolean createOrForceNodeOffline(ZooKeeperWatcher zkw,
HRegionInfo region, String serverName)
throws KeeperException {
- LOG.debug(zkw.prefix("Creating or updating an unassigned node for " +
- region.getEncodedName() + " with an OFFLINE state"));
+ LOG.debug(zkw.prefix("Creating (or updating) unassigned node for " +
+ region.getEncodedName() + " with OFFLINE state"));
RegionTransitionData data = new RegionTransitionData(
EventType.M2ZK_REGION_OFFLINE, region.getRegionName(), serverName);
synchronized(zkw.getNodes()) {
@@ -319,7 +319,7 @@ public static boolean deleteClosingNode(ZooKeeperWatcher zkw,
private static boolean deleteNode(ZooKeeperWatcher zkw, String regionName,
EventType expectedState)
throws KeeperException, KeeperException.NoNodeException {
- LOG.debug(zkw.prefix("Deleting an existing unassigned " +
+ LOG.debug(zkw.prefix("Deleting existing unassigned " +
"node for " + regionName + " that is in expected state " + expectedState));
String node = getNodeName(zkw, regionName);
Stat stat = new Stat();
@@ -329,7 +329,7 @@ private static boolean deleteNode(ZooKeeperWatcher zkw, String regionName,
}
RegionTransitionData data = RegionTransitionData.fromBytes(bytes);
if(!data.getEventType().equals(expectedState)) {
- LOG.warn(zkw.prefix("Attempting to delete an unassigned " +
+ LOG.warn(zkw.prefix("Attempting to delete unassigned " +
"node in " + expectedState +
" state but node is in " + data.getEventType() + " state"));
return false;
@@ -338,7 +338,7 @@ private static boolean deleteNode(ZooKeeperWatcher zkw, String regionName,
// TODO: Does this go here or only if we successfully delete node?
zkw.getNodes().remove(node);
if(!ZKUtil.deleteNode(zkw, node, stat.getVersion())) {
- LOG.warn(zkw.prefix("Attempting to delete an " +
+ LOG.warn(zkw.prefix("Attempting to delete " +
"unassigned node in " + expectedState +
" state but " +
"after verifying it was in OPENED state, we got a version mismatch"));
@@ -392,7 +392,7 @@ public static void deleteAllNodes(ZooKeeperWatcher zkw)
public static int createNodeClosing(ZooKeeperWatcher zkw, HRegionInfo region,
String serverName)
throws KeeperException, KeeperException.NodeExistsException {
- LOG.debug(zkw.prefix("Creating an unassigned node for " +
+ LOG.debug(zkw.prefix("Creating unassigned node for " +
region.getEncodedName() + " in a CLOSING state"));
RegionTransitionData data = new RegionTransitionData(
EventType.RS2ZK_REGION_CLOSING, region.getRegionName(), serverName);
|
21760a8b6b030233d4a82d8026bc9910e0a93ea5
|
spring-framework
|
Provide alternative message code resolver styles--Introduce new 'style' property to DefaultMessageCodesResolver allowing-for alternative message styles. Current styles are PREFIX_ERROR_CODE-and POSTFIX_ERROR_CODE. The default style retains existing behavior.--Issue: SPR-9707-
|
a
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-context/src/main/java/org/springframework/validation/DefaultMessageCodesResolver.java b/spring-context/src/main/java/org/springframework/validation/DefaultMessageCodesResolver.java
index ad282eb1d19c..44d8b65c720c 100644
--- a/spring-context/src/main/java/org/springframework/validation/DefaultMessageCodesResolver.java
+++ b/spring-context/src/main/java/org/springframework/validation/DefaultMessageCodesResolver.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2008 the original author or authors.
+ * Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,14 +18,18 @@
import java.io.Serializable;
import java.util.ArrayList;
+import java.util.Collection;
+import java.util.LinkedHashSet;
import java.util.List;
+import java.util.Set;
import org.springframework.util.StringUtils;
/**
* Default implementation of the {@link MessageCodesResolver} interface.
*
- * <p>Will create two message codes for an object error, in the following order:
+ * <p>Will create two message codes for an object error, in the following order (when
+ * using the {@link Style#PREFIX_ERROR_CODE prefixed} {@link #setStyle(Style) style}):
* <ul>
* <li>1.: code + "." + object name
* <li>2.: code
@@ -68,11 +72,16 @@
* <li>7. try "typeMismatch"
* </ul>
*
+ * <p>By default the {@code errorCode}s will be placed at the beginning of constructed
+ * message strings. The {@link #setStyle(Style) style} property can be used to specify
+ * alternative {@link Style styles} of concatination.
+ *
* <p>In order to group all codes into a specific category within your resource bundles,
* e.g. "validation.typeMismatch.name" instead of the default "typeMismatch.name",
* consider specifying a {@link #setPrefix prefix} to be applied.
*
* @author Juergen Hoeller
+ * @author Phillip Webb
* @since 1.0.1
*/
@SuppressWarnings("serial")
@@ -83,9 +92,13 @@ public class DefaultMessageCodesResolver implements MessageCodesResolver, Serial
*/
public static final String CODE_SEPARATOR = ".";
+ private static final Style DEFAULT_STYLE = Style.PREFIX_ERROR_CODE;
+
private String prefix = "";
+ private Style style = DEFAULT_STYLE;
+
/**
* Specify a prefix to be applied to any code built by this resolver.
@@ -96,6 +109,14 @@ public void setPrefix(String prefix) {
this.prefix = (prefix != null ? prefix : "");
}
+ /**
+ * Specify the style of message code that will be built by this resolver.
+ * <p>Default is {@link Style#PREFIX_ERROR_CODE}.
+ */
+ public void setStyle(Style style) {
+ this.style = (style == null ? DEFAULT_STYLE : style);
+ }
+
/**
* Return the prefix to be applied to any code built by this resolver.
* <p>Returns an empty String in case of no prefix.
@@ -106,9 +127,7 @@ protected String getPrefix() {
public String[] resolveMessageCodes(String errorCode, String objectName) {
- return new String[] {
- postProcessMessageCode(errorCode + CODE_SEPARATOR + objectName),
- postProcessMessageCode(errorCode)};
+ return resolveMessageCodes(errorCode, objectName, "", null);
}
/**
@@ -121,26 +140,54 @@ public String[] resolveMessageCodes(String errorCode, String objectName) {
* @return the list of codes
*/
public String[] resolveMessageCodes(String errorCode, String objectName, String field, Class<?> fieldType) {
- List<String> codeList = new ArrayList<String>();
+ Set<String> codeList = new LinkedHashSet<String>();
List<String> fieldList = new ArrayList<String>();
buildFieldList(field, fieldList);
- for (String fieldInList : fieldList) {
- codeList.add(postProcessMessageCode(errorCode + CODE_SEPARATOR + objectName + CODE_SEPARATOR + fieldInList));
- }
+ addCodes(codeList, errorCode, objectName, fieldList);
int dotIndex = field.lastIndexOf('.');
if (dotIndex != -1) {
buildFieldList(field.substring(dotIndex + 1), fieldList);
}
- for (String fieldInList : fieldList) {
- codeList.add(postProcessMessageCode(errorCode + CODE_SEPARATOR + fieldInList));
- }
+ addCodes(codeList, errorCode, null, fieldList);
if (fieldType != null) {
- codeList.add(postProcessMessageCode(errorCode + CODE_SEPARATOR + fieldType.getName()));
+ addCode(codeList, errorCode, null, fieldType.getName());
}
- codeList.add(postProcessMessageCode(errorCode));
+ addCode(codeList, errorCode, null, null);
return StringUtils.toStringArray(codeList);
}
+ private void addCodes(Collection<String> codeList, String errorCode, String objectName, Iterable<String> fields) {
+ for (String field : fields) {
+ addCode(codeList, errorCode, objectName, field);
+ }
+ }
+
+ private void addCode(Collection<String> codeList, String errorCode, String objectName, String field) {
+ String code = getCode(errorCode, objectName, field);
+ codeList.add(postProcessMessageCode(code));
+ }
+
+ private String getCode(String errorCode, String objectName, String field) {
+ switch (this.style) {
+ case PREFIX_ERROR_CODE:
+ return toDelimitedString(errorCode, objectName, field);
+ case POSTFIX_ERROR_CODE:
+ return toDelimitedString(objectName, field, errorCode);
+ }
+ throw new IllegalStateException("Unknown style " + this.style);
+ }
+
+ private String toDelimitedString(String... elements) {
+ StringBuilder rtn = new StringBuilder();
+ for (String element : elements) {
+ if(StringUtils.hasLength(element)) {
+ rtn.append(rtn.length() == 0 ? "" : CODE_SEPARATOR);
+ rtn.append(element);
+ }
+ }
+ return rtn.toString();
+ }
+
/**
* Add both keyed and non-keyed entries for the supplied <code>field</code>
* to the supplied field list.
@@ -173,4 +220,23 @@ protected String postProcessMessageCode(String code) {
return getPrefix() + code;
}
+
+ /**
+ * The various styles that can be used to construct message codes.
+ */
+ public static enum Style {
+
+ /**
+ * Prefix the error code at the beginning of the generated message code. eg:
+ * {@code errorCode + "." + object name + "." + field}
+ */
+ PREFIX_ERROR_CODE,
+
+ /**
+ * Postfix the error code at the end of the generated message code. eg:
+ * {@code object name + "." + field + "." + errorCode}
+ */
+ POSTFIX_ERROR_CODE
+ }
+
}
diff --git a/spring-context/src/test/java/org/springframework/validation/DefaultMessageCodesResolverTests.java b/spring-context/src/test/java/org/springframework/validation/DefaultMessageCodesResolverTests.java
index 33083409ae6e..18bb109b5a32 100644
--- a/spring-context/src/test/java/org/springframework/validation/DefaultMessageCodesResolverTests.java
+++ b/spring-context/src/test/java/org/springframework/validation/DefaultMessageCodesResolverTests.java
@@ -22,6 +22,7 @@
import org.junit.Test;
import org.springframework.beans.TestBean;
+import org.springframework.validation.DefaultMessageCodesResolver.Style;
/**
* Tests for {@link DefaultMessageCodesResolver}.
@@ -119,5 +120,26 @@ public void shouldSupportNullFieldType() throws Exception {
"errorCode.objectName.field",
"errorCode.field",
"errorCode" })));
- }
+ }
+
+ @Test
+ public void shouldSupportPostfixStyle() throws Exception {
+ resolver.setStyle(Style.POSTFIX_ERROR_CODE);
+ String[] codes = resolver.resolveMessageCodes("errorCode", "objectName");
+ assertThat(codes, is(equalTo(new String[] {
+ "objectName.errorCode",
+ "errorCode" })));
+ }
+
+ @Test
+ public void shouldSupportFieldPostfixStyle() throws Exception {
+ resolver.setStyle(Style.POSTFIX_ERROR_CODE);
+ String[] codes = resolver.resolveMessageCodes("errorCode", "objectName", "field",
+ TestBean.class);
+ assertThat(codes, is(equalTo(new String[] {
+ "objectName.field.errorCode",
+ "field.errorCode",
+ "org.springframework.beans.TestBean.errorCode",
+ "errorCode" })));
+ }
}
|
322578b7736f174b9b8e47914c87e9b77c1c1fd4
|
kotlin
|
Replaced AddReturnTypeFix with- SpecifyTypeExplicitlyAction for properties.--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/src/org/jetbrains/jet/plugin/intentions/SpecifyTypeExplicitlyAction.java b/idea/src/org/jetbrains/jet/plugin/intentions/SpecifyTypeExplicitlyAction.java
index f10680dddf543..ae3269014cf68 100644
--- a/idea/src/org/jetbrains/jet/plugin/intentions/SpecifyTypeExplicitlyAction.java
+++ b/idea/src/org/jetbrains/jet/plugin/intentions/SpecifyTypeExplicitlyAction.java
@@ -20,17 +20,17 @@
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
-import com.intellij.psi.impl.source.tree.LeafPsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.jet.lang.descriptors.DeclarationDescriptor;
import org.jetbrains.jet.lang.descriptors.VariableDescriptor;
+import org.jetbrains.jet.lang.diagnostics.Diagnostic;
+import org.jetbrains.jet.lang.diagnostics.Errors;
import org.jetbrains.jet.lang.psi.JetFile;
import org.jetbrains.jet.lang.psi.JetProperty;
import org.jetbrains.jet.lang.resolve.BindingContext;
import org.jetbrains.jet.lang.types.ErrorUtils;
import org.jetbrains.jet.lang.types.JetType;
-import org.jetbrains.jet.lexer.JetTokens;
import org.jetbrains.jet.plugin.JetBundle;
import org.jetbrains.jet.plugin.project.AnalyzeSingleFileUtil;
import org.jetbrains.jet.plugin.refactoring.introduceVariable.JetChangePropertyActions;
@@ -41,6 +41,15 @@
*/
public class SpecifyTypeExplicitlyAction extends PsiElementBaseIntentionAction {
private JetType targetType;
+ private boolean disabledForError;
+
+ public SpecifyTypeExplicitlyAction() {
+ this(true);
+ }
+
+ public SpecifyTypeExplicitlyAction(boolean disabledForError) {
+ this.disabledForError = disabledForError;
+ }
@NotNull
@Override
@@ -82,6 +91,13 @@ public boolean isAvailable(@NotNull Project project, Editor editor, @NotNull Psi
if (ErrorUtils.isErrorType(targetType)) {
return false;
}
+ if (disabledForError) {
+ for (Diagnostic diagnostic : bindingContext.getDiagnostics()) {
+ if (Errors.PUBLIC_MEMBER_SHOULD_SPECIFY_TYPE == diagnostic.getFactory() && property == diagnostic.getPsiElement()) {
+ return false;
+ }
+ }
+ }
}
return true;
}
diff --git a/idea/src/org/jetbrains/jet/plugin/quickfix/AddReturnTypeFix.java b/idea/src/org/jetbrains/jet/plugin/quickfix/AddReturnTypeFix.java
index 6a55bbfd47118..ec9a070dedaf2 100644
--- a/idea/src/org/jetbrains/jet/plugin/quickfix/AddReturnTypeFix.java
+++ b/idea/src/org/jetbrains/jet/plugin/quickfix/AddReturnTypeFix.java
@@ -52,7 +52,7 @@ public String getFamilyName() {
@Override
public boolean isAvailable(@NotNull Project project, Editor editor, PsiFile file) {
JetType type = QuickFixUtil.getDeclarationReturnType(element);
- return super.isAvailable(project, editor, file) && type != null && !ErrorUtils.isErrorType(type);
+ return super.isAvailable(project, editor, file) && type != null && !ErrorUtils.isErrorType(type) && element instanceof JetFunction;
}
@Override
@@ -61,9 +61,6 @@ public void invoke(@NotNull Project project, Editor editor, PsiFile file) throws
PsiElement newElement;
JetType type = QuickFixUtil.getDeclarationReturnType(element);
if (type == null) return;
- if (element instanceof JetProperty) {
- newElement = addPropertyType(project, (JetProperty) element, type);
- }
else {
assert element instanceof JetFunction;
newElement = addFunctionType(project, (JetFunction) element, type);
diff --git a/idea/src/org/jetbrains/jet/plugin/quickfix/QuickFixes.java b/idea/src/org/jetbrains/jet/plugin/quickfix/QuickFixes.java
index dda1344366b85..ebd50c228edf1 100644
--- a/idea/src/org/jetbrains/jet/plugin/quickfix/QuickFixes.java
+++ b/idea/src/org/jetbrains/jet/plugin/quickfix/QuickFixes.java
@@ -23,6 +23,7 @@
import org.jetbrains.jet.lang.diagnostics.Errors;
import org.jetbrains.jet.lang.psi.JetClass;
import org.jetbrains.jet.plugin.codeInsight.ImplementMethodsHandler;
+import org.jetbrains.jet.plugin.intentions.SpecifyTypeExplicitlyAction;
import java.util.Collection;
@@ -137,5 +138,6 @@ private QuickFixes() {}
actions.put(UNNECESSARY_SAFE_CALL, new ReplaceCallFix(false));
actions.put(UNSAFE_CALL, new ReplaceCallFix(true));
+ actions.put(PUBLIC_MEMBER_SHOULD_SPECIFY_TYPE, new SpecifyTypeExplicitlyAction(false));
}
}
\ No newline at end of file
diff --git a/idea/testData/quickfix/typeAddition/afterPublicValWithoutReturnType.kt b/idea/testData/quickfix/typeAddition/afterPublicValWithoutReturnType.kt
index 9f66148255266..d0796a7cd7444 100644
--- a/idea/testData/quickfix/typeAddition/afterPublicValWithoutReturnType.kt
+++ b/idea/testData/quickfix/typeAddition/afterPublicValWithoutReturnType.kt
@@ -1,4 +1,4 @@
-// "Add return type declaration" "true"
+// "Specify Type Explicitly" "true"
package a
import java.util.List
diff --git a/idea/testData/quickfix/typeAddition/beforePublicValWithoutReturnType.kt b/idea/testData/quickfix/typeAddition/beforePublicValWithoutReturnType.kt
index 5fb773845c16f..6155ec718689b 100644
--- a/idea/testData/quickfix/typeAddition/beforePublicValWithoutReturnType.kt
+++ b/idea/testData/quickfix/typeAddition/beforePublicValWithoutReturnType.kt
@@ -1,4 +1,4 @@
-// "Add return type declaration" "true"
+// "Specify Type Explicitly" "true"
package a
public val <caret>l = java.util.Collections.emptyList<Int>()
\ No newline at end of file
diff --git a/idea/testData/quickfix/typeImports/afterImportFromAnotherFile.kt b/idea/testData/quickfix/typeImports/afterImportFromAnotherFile.kt
index 40d4462c1fa22..5794bf9f5738d 100644
--- a/idea/testData/quickfix/typeImports/afterImportFromAnotherFile.kt
+++ b/idea/testData/quickfix/typeImports/afterImportFromAnotherFile.kt
@@ -1,4 +1,4 @@
-// "Add return type declaration" "true"
+// "Specify Type Explicitly" "true"
package a
diff --git a/idea/testData/quickfix/typeImports/afterNoImportFromTheSameFile.kt b/idea/testData/quickfix/typeImports/afterNoImportFromTheSameFile.kt
index dbf8c11c53b72..5acd2b8b7e562 100644
--- a/idea/testData/quickfix/typeImports/afterNoImportFromTheSameFile.kt
+++ b/idea/testData/quickfix/typeImports/afterNoImportFromTheSameFile.kt
@@ -1,4 +1,4 @@
-// "Add return type declaration" "true"
+// "Specify Type Explicitly" "true"
class A() {}
diff --git a/idea/testData/quickfix/typeImports/beforeImportFromAnotherFile.Main.kt b/idea/testData/quickfix/typeImports/beforeImportFromAnotherFile.Main.kt
index 66f32cb95a825..bc5de5d7d13da 100644
--- a/idea/testData/quickfix/typeImports/beforeImportFromAnotherFile.Main.kt
+++ b/idea/testData/quickfix/typeImports/beforeImportFromAnotherFile.Main.kt
@@ -1,4 +1,4 @@
-// "Add return type declaration" "true"
+// "Specify Type Explicitly" "true"
package a
diff --git a/idea/testData/quickfix/typeImports/beforeNoImportFromTheSameFile.kt b/idea/testData/quickfix/typeImports/beforeNoImportFromTheSameFile.kt
index 2b6d8501e56ba..973d53507d706 100644
--- a/idea/testData/quickfix/typeImports/beforeNoImportFromTheSameFile.kt
+++ b/idea/testData/quickfix/typeImports/beforeNoImportFromTheSameFile.kt
@@ -1,4 +1,4 @@
-// "Add return type declaration" "true"
+// "Specify Type Explicitly" "true"
class A() {}
|
d9ed3ad870a70bbda3183148223ee519e45bde43
|
orientdb
|
Issue 762 was fixed.--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OClassIndexManager.java b/core/src/main/java/com/orientechnologies/orient/core/index/OClassIndexManager.java
index e860fcefb6b..2699f835912 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OClassIndexManager.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OClassIndexManager.java
@@ -29,6 +29,7 @@
import java.util.SortedSet;
import java.util.TreeSet;
+import com.orientechnologies.common.collection.OCompositeKey;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.db.record.OMultiValueChangeEvent;
import com.orientechnologies.orient.core.db.record.OMultiValueChangeTimeLine;
@@ -182,12 +183,7 @@ public void onRecordAfterDelete(final ODocument iRecord) {
// REMOVE INDEX OF ENTRIES FOR THE NON CHANGED ONLY VALUES
for (final OIndex<?> index : indexes) {
final Object key = index.getDefinition().getDocumentValueToIndex(iRecord);
- if (key instanceof Collection) {
- for (final Object keyItem : (Collection<?>) key)
- if (keyItem != null)
- index.remove(keyItem, iRecord);
- } else if (key != null)
- index.remove(key, iRecord);
+ deleteIndexKey(index, iRecord, key);
}
}
@@ -204,37 +200,80 @@ public void onRecordDeleteFailed(ODocument iDocument) {
releaseModificationLock(iDocument);
}
- private void processCompositeIndexUpdate(final OIndex<?> index, final Set<String> dirtyFields, final ODocument iRecord) {
- final OIndexDefinition indexDefinition = index.getDefinition();
+ private static void processCompositeIndexUpdate(final OIndex<?> index, final Set<String> dirtyFields, final ODocument iRecord) {
+ final OCompositeIndexDefinition indexDefinition = (OCompositeIndexDefinition) index.getDefinition();
+
final List<String> indexFields = indexDefinition.getFields();
+ final String multiValueField = indexDefinition.getMultiValueField();
+
for (final String indexField : indexFields) {
if (dirtyFields.contains(indexField)) {
final List<Object> origValues = new ArrayList<Object>(indexFields.size());
for (final String field : indexFields) {
- if (dirtyFields.contains(field)) {
- origValues.add(iRecord.getOriginalValue(field));
- } else {
- origValues.add(iRecord.<Object> field(field));
- }
+ if (!field.equals(multiValueField))
+ if (dirtyFields.contains(field)) {
+ origValues.add(iRecord.getOriginalValue(field));
+ } else {
+ origValues.add(iRecord.<Object> field(field));
+ }
}
- final Object origValue = indexDefinition.createValue(origValues);
- final Object newValue = indexDefinition.getDocumentValueToIndex(iRecord);
+ if (multiValueField == null) {
+ final Object origValue = indexDefinition.createValue(origValues);
+ final Object newValue = indexDefinition.getDocumentValueToIndex(iRecord);
- if (origValue != null) {
- index.remove(origValue, iRecord);
- }
+ if (origValue != null)
+ index.remove(origValue, iRecord);
- if (newValue != null) {
- index.put(newValue, iRecord.placeholder());
+ if (newValue != null)
+ index.put(newValue, iRecord.placeholder());
+ } else {
+ final OMultiValueChangeTimeLine<?, ?> multiValueChangeTimeLine = iRecord.getCollectionTimeLine(multiValueField);
+ if (multiValueChangeTimeLine == null) {
+ if (dirtyFields.contains(multiValueField))
+ origValues.add(indexDefinition.getMultiValueDefinitionIndex(), iRecord.getOriginalValue(multiValueField));
+ else
+ origValues.add(indexDefinition.getMultiValueDefinitionIndex(), iRecord.field(multiValueField));
+
+ final Object origValue = indexDefinition.createValue(origValues);
+ final Object newValue = indexDefinition.getDocumentValueToIndex(iRecord);
+
+ processIndexUpdateFieldAssignment(index, iRecord, origValue, newValue);
+ } else {
+ if (dirtyFields.size() == 1) {
+ final Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
+ final Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
+
+ for (OMultiValueChangeEvent<?, ?> changeEvent : multiValueChangeTimeLine.getMultiValueChangeEvents()) {
+ indexDefinition.processChangeEvent(changeEvent, keysToAdd, keysToRemove, origValues.toArray());
+ }
+
+ for (final Object keyToRemove : keysToRemove.keySet())
+ index.remove(keyToRemove, iRecord);
+
+ for (final Object keyToAdd : keysToAdd.keySet())
+ index.put(keyToAdd, iRecord.placeholder());
+ } else {
+ final OTrackedMultiValue fieldValue = iRecord.field(multiValueField);
+ final Object restoredMultiValue = fieldValue
+ .returnOriginalState(multiValueChangeTimeLine.getMultiValueChangeEvents());
+
+ origValues.add(indexDefinition.getMultiValueDefinitionIndex(), restoredMultiValue);
+
+ final Object origValue = indexDefinition.createValue(origValues);
+ final Object newValue = indexDefinition.getDocumentValueToIndex(iRecord);
+
+ processIndexUpdateFieldAssignment(index, iRecord, origValue, newValue);
+ }
+ }
}
return;
}
}
}
- private void processSingleIndexUpdate(final OIndex<?> index, final Set<String> dirtyFields, final ODocument iRecord) {
+ private static void processSingleIndexUpdate(final OIndex<?> index, final Set<String> dirtyFields, final ODocument iRecord) {
final OIndexDefinition indexDefinition = index.getDefinition();
final List<String> indexFields = indexDefinition.getFields();
@@ -265,48 +304,47 @@ private void processSingleIndexUpdate(final OIndex<?> index, final Set<String> d
final Object origValue = indexDefinition.createValue(iRecord.getOriginalValue(indexField));
final Object newValue = indexDefinition.getDocumentValueToIndex(iRecord);
- if ((origValue instanceof Collection) && (newValue instanceof Collection)) {
- final Set<Object> valuesToRemove = new HashSet<Object>((Collection<?>) origValue);
- final Set<Object> valuesToAdd = new HashSet<Object>((Collection<?>) newValue);
+ processIndexUpdateFieldAssignment(index, iRecord, origValue, newValue);
+ }
+ }
- valuesToRemove.removeAll((Collection<?>) newValue);
- valuesToAdd.removeAll((Collection<?>) origValue);
+ private static void processIndexUpdateFieldAssignment(OIndex<?> index, ODocument iRecord, final Object origValue,
+ final Object newValue) {
+ if ((origValue instanceof Collection) && (newValue instanceof Collection)) {
+ final Set<Object> valuesToRemove = new HashSet<Object>((Collection<?>) origValue);
+ final Set<Object> valuesToAdd = new HashSet<Object>((Collection<?>) newValue);
- for (final Object valueToRemove : valuesToRemove) {
- if (valueToRemove != null) {
- index.remove(valueToRemove, iRecord);
- }
- }
+ valuesToRemove.removeAll((Collection<?>) newValue);
+ valuesToAdd.removeAll((Collection<?>) origValue);
- for (final Object valueToAdd : valuesToAdd) {
- if (valueToAdd != null) {
- index.put(valueToAdd, iRecord);
- }
+ for (final Object valueToRemove : valuesToRemove) {
+ if (valueToRemove != null) {
+ index.remove(valueToRemove, iRecord);
}
- } else {
- if (origValue instanceof Collection) {
- for (final Object origValueItem : (Collection<?>) origValue) {
- if (origValueItem != null) {
- index.remove(origValueItem, iRecord);
- }
- }
- } else if (origValue != null) {
- index.remove(origValue, iRecord);
+ }
+
+ for (final Object valueToAdd : valuesToAdd) {
+ if (valueToAdd != null) {
+ index.put(valueToAdd, iRecord);
}
+ }
+ } else {
+ deleteIndexKey(index, iRecord, origValue);
- if (newValue instanceof Collection) {
- for (final Object newValueItem : (Collection<?>) newValue) {
- index.put(newValueItem, iRecord.placeholder());
- }
- } else if (newValue != null) {
- index.put(newValue, iRecord.placeholder());
+ if (newValue instanceof Collection) {
+ for (final Object newValueItem : (Collection<?>) newValue) {
+ index.put(newValueItem, iRecord.placeholder());
}
+ } else if (newValue != null) {
+ index.put(newValue, iRecord.placeholder());
}
}
}
- private boolean processCompositeIndexDelete(final OIndex<?> index, final Set<String> dirtyFields, final ODocument iRecord) {
- final OIndexDefinition indexDefinition = index.getDefinition();
+ private static boolean processCompositeIndexDelete(final OIndex<?> index, final Set<String> dirtyFields, final ODocument iRecord) {
+ final OCompositeIndexDefinition indexDefinition = (OCompositeIndexDefinition) index.getDefinition();
+
+ final String multiValueField = indexDefinition.getMultiValueField();
final List<String> indexFields = indexDefinition.getFields();
for (final String indexField : indexFields) {
@@ -315,15 +353,27 @@ private boolean processCompositeIndexDelete(final OIndex<?> index, final Set<Str
final List<Object> origValues = new ArrayList<Object>(indexFields.size());
for (final String field : indexFields) {
- if (dirtyFields.contains(field))
- origValues.add(iRecord.getOriginalValue(field));
+ if (!field.equals(multiValueField))
+ if (dirtyFields.contains(field))
+ origValues.add(iRecord.getOriginalValue(field));
+ else
+ origValues.add(iRecord.<Object> field(field));
+ }
+
+ if (multiValueField != null) {
+ final OMultiValueChangeTimeLine<?, ?> multiValueChangeTimeLine = iRecord.getCollectionTimeLine(multiValueField);
+ if (multiValueChangeTimeLine != null) {
+ final OTrackedMultiValue fieldValue = iRecord.field(multiValueField);
+ final Object restoredMultiValue = fieldValue.returnOriginalState(multiValueChangeTimeLine.getMultiValueChangeEvents());
+ origValues.add(indexDefinition.getMultiValueDefinitionIndex(), restoredMultiValue);
+ } else if (dirtyFields.contains(multiValueField))
+ origValues.add(indexDefinition.getMultiValueDefinitionIndex(), iRecord.getOriginalValue(multiValueField));
else
- origValues.add(iRecord.<Object> field(field));
+ origValues.add(indexDefinition.getMultiValueDefinitionIndex(), iRecord.field(multiValueField));
}
final Object origValue = indexDefinition.createValue(origValues);
- if (origValue != null)
- index.remove(origValue, iRecord);
+ deleteIndexKey(index, iRecord, origValue);
return true;
}
@@ -331,8 +381,19 @@ private boolean processCompositeIndexDelete(final OIndex<?> index, final Set<Str
return false;
}
+ private static void deleteIndexKey(OIndex<?> index, ODocument iRecord, Object origValue) {
+ if (origValue instanceof Collection) {
+ for (final Object valueItem : (Collection<?>) origValue) {
+ if (valueItem != null)
+ index.remove(valueItem, iRecord);
+ }
+ } else if (origValue != null) {
+ index.remove(origValue, iRecord);
+ }
+ }
+
@SuppressWarnings({ "rawtypes", "unchecked" })
- private boolean processSingleIndexDelete(final OIndex<?> index, final Set<String> dirtyFields, final ODocument iRecord) {
+ private static boolean processSingleIndexDelete(final OIndex<?> index, final Set<String> dirtyFields, final ODocument iRecord) {
final OIndexDefinition indexDefinition = index.getDefinition();
final List<String> indexFields = indexDefinition.getFields();
@@ -352,21 +413,13 @@ private boolean processSingleIndexDelete(final OIndex<?> index, final Set<String
} else
origValue = indexDefinition.createValue(iRecord.getOriginalValue(indexField));
- if (origValue instanceof Collection) {
- for (final Object valueItem : (Collection<?>) origValue) {
- if (valueItem != null) {
- index.remove(valueItem, iRecord);
- }
- }
- } else if (origValue != null) {
- index.remove(origValue, iRecord);
- }
+ deleteIndexKey(index, iRecord, origValue);
return true;
}
return false;
}
- private void checkIndexedPropertiesOnCreation(final ODocument iRecord) {
+ private static void checkIndexedPropertiesOnCreation(final ODocument iRecord) {
final OClass cls = iRecord.getSchemaClass();
if (cls == null)
return;
@@ -386,7 +439,7 @@ private void checkIndexedPropertiesOnCreation(final ODocument iRecord) {
}
}
- private void acquireModificationLock(final ODocument iRecord) {
+ private static void acquireModificationLock(final ODocument iRecord) {
final OClass cls = iRecord.getSchemaClass();
if (cls == null)
return;
@@ -406,7 +459,7 @@ public int compare(OIndex<?> indexOne, OIndex<?> indexTwo) {
}
}
- private void releaseModificationLock(final ODocument iRecord) {
+ private static void releaseModificationLock(final ODocument iRecord) {
final OClass cls = iRecord.getSchemaClass();
if (cls == null)
return;
@@ -417,7 +470,7 @@ private void releaseModificationLock(final ODocument iRecord) {
}
}
- private void checkIndexedPropertiesOnUpdate(final ODocument iRecord) {
+ private static void checkIndexedPropertiesOnUpdate(final ODocument iRecord) {
final OClass cls = iRecord.getSchemaClass();
if (cls == null)
return;
@@ -448,7 +501,7 @@ private void checkIndexedPropertiesOnUpdate(final ODocument iRecord) {
}
}
- private ODocument checkForLoading(final ODocument iRecord) {
+ private static ODocument checkForLoading(final ODocument iRecord) {
if (iRecord.getInternalStatus() == ORecordElement.STATUS.NOT_LOADED) {
try {
return (ODocument) iRecord.load();
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OCompositeIndexDefinition.java b/core/src/main/java/com/orientechnologies/orient/core/index/OCompositeIndexDefinition.java
index b70418b6802..8bb1d48c224 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OCompositeIndexDefinition.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OCompositeIndexDefinition.java
@@ -18,12 +18,16 @@
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
+import java.util.Map;
+import java.util.Set;
import com.orientechnologies.common.collection.OCompositeKey;
+import com.orientechnologies.orient.core.db.record.OMultiValueChangeEvent;
import com.orientechnologies.orient.core.db.record.ORecordElement;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
@@ -36,9 +40,10 @@
public class OCompositeIndexDefinition extends ODocumentWrapperNoClass implements OIndexDefinition {
private final List<OIndexDefinition> indexDefinitions;
private String className;
+ private int multiValueDefinitionIndex = -1;
public OCompositeIndexDefinition() {
- indexDefinitions = new LinkedList<OIndexDefinition>();
+ indexDefinitions = new ArrayList<OIndexDefinition>(5);
}
/**
@@ -50,7 +55,7 @@ public OCompositeIndexDefinition() {
public OCompositeIndexDefinition(final String iClassName) {
super(new ODocument());
- indexDefinitions = new LinkedList<OIndexDefinition>();
+ indexDefinitions = new ArrayList<OIndexDefinition>(5);
className = iClassName;
}
@@ -64,8 +69,18 @@ public OCompositeIndexDefinition(final String iClassName) {
*/
public OCompositeIndexDefinition(final String iClassName, final List<? extends OIndexDefinition> iIndexes) {
super(new ODocument());
- indexDefinitions = new LinkedList<OIndexDefinition>();
- indexDefinitions.addAll(iIndexes);
+
+ indexDefinitions = new ArrayList<OIndexDefinition>(5);
+ for (OIndexDefinition indexDefinition : iIndexes) {
+ indexDefinitions.add(indexDefinition);
+
+ if (indexDefinition instanceof OIndexDefinitionMultiValue)
+ if (multiValueDefinitionIndex == -1)
+ multiValueDefinitionIndex = indexDefinitions.size() - 1;
+ else
+ throw new OIndexException("Composite key can not contain more than one collection item");
+ }
+
className = iClassName;
}
@@ -84,6 +99,13 @@ public String getClassName() {
*/
public void addIndex(final OIndexDefinition indexDefinition) {
indexDefinitions.add(indexDefinition);
+ if (indexDefinition instanceof OIndexDefinitionMultiValue) {
+ if (multiValueDefinitionIndex == -1)
+ multiValueDefinitionIndex = indexDefinitions.size() - 1;
+ else
+ throw new OIndexException("Composite key can not contain more than one collection item");
+ }
+
}
/**
@@ -97,11 +119,26 @@ public List<String> getFields() {
return Collections.unmodifiableList(fields);
}
+ /**
+ * {@inheritDoc}
+ */
+ public List<String> getFieldsToIndex() {
+ final List<String> fields = new LinkedList<String>();
+ for (final OIndexDefinition indexDefinition : indexDefinitions) {
+ fields.addAll(indexDefinition.getFieldsToIndex());
+ }
+ return Collections.unmodifiableList(fields);
+ }
+
/**
* {@inheritDoc}
*/
public Object getDocumentValueToIndex(final ODocument iDocument) {
- final OCompositeKey compositeKey = new OCompositeKey();
+ final List<OCompositeKey> compositeKeys = new ArrayList<OCompositeKey>(10);
+ final OCompositeKey firstKey = new OCompositeKey();
+ boolean containsCollection = false;
+
+ compositeKeys.add(firstKey);
for (final OIndexDefinition indexDefinition : indexDefinitions) {
final Object result = indexDefinition.getDocumentValueToIndex(iDocument);
@@ -109,18 +146,37 @@ public Object getDocumentValueToIndex(final ODocument iDocument) {
if (result == null)
return null;
- compositeKey.addKey(result);
+ containsCollection = addKey(firstKey, compositeKeys, containsCollection, result);
}
- return compositeKey;
+ if (!containsCollection)
+ return firstKey;
+
+ return compositeKeys;
+ }
+
+ public int getMultiValueDefinitionIndex() {
+ return multiValueDefinitionIndex;
+ }
+
+ public String getMultiValueField() {
+ if (multiValueDefinitionIndex >= 0)
+ return indexDefinitions.get(multiValueDefinitionIndex).getFields().get(0);
+
+ return null;
}
/**
* {@inheritDoc}
*/
- public Comparable<?> createValue(final List<?> params) {
+ public Object createValue(final List<?> params) {
int currentParamIndex = 0;
- final OCompositeKey compositeKey = new OCompositeKey();
+ final OCompositeKey firstKey = new OCompositeKey();
+
+ final List<OCompositeKey> compositeKeys = new ArrayList<OCompositeKey>(10);
+ compositeKeys.add(firstKey);
+
+ boolean containsCollection = false;
for (final OIndexDefinition indexDefinition : indexDefinitions) {
if (currentParamIndex + 1 > params.size())
@@ -137,6 +193,49 @@ public Comparable<?> createValue(final List<?> params) {
final Object keyValue = indexDefinition.createValue(indexParams);
+ if (keyValue == null)
+ return null;
+
+ containsCollection = addKey(firstKey, compositeKeys, containsCollection, keyValue);
+ }
+
+ if (!containsCollection)
+ return firstKey;
+
+ return compositeKeys;
+ }
+
+ public OIndexDefinitionMultiValue getMultiValueDefinition() {
+ if (multiValueDefinitionIndex > -1)
+ return (OIndexDefinitionMultiValue) indexDefinitions.get(multiValueDefinitionIndex);
+
+ return null;
+ }
+
+ public OCompositeKey createSingleValue(final List<?> params) {
+ final OCompositeKey compositeKey = new OCompositeKey();
+ int currentParamIndex = 0;
+
+ for (final OIndexDefinition indexDefinition : indexDefinitions) {
+ if (currentParamIndex + 1 > params.size())
+ break;
+
+ final int endIndex;
+ if (currentParamIndex + indexDefinition.getParamCount() > params.size())
+ endIndex = params.size();
+ else
+ endIndex = currentParamIndex + indexDefinition.getParamCount();
+
+ final List<?> indexParams = params.subList(currentParamIndex, endIndex);
+ currentParamIndex += indexDefinition.getParamCount();
+
+ final Object keyValue;
+
+ if (indexDefinition instanceof OIndexDefinitionMultiValue)
+ keyValue = ((OIndexDefinitionMultiValue) indexDefinition).createSingleValue(indexParams.toArray());
+ else
+ keyValue = indexDefinition.createValue(indexParams);
+
if (keyValue == null)
return null;
@@ -146,13 +245,58 @@ public Comparable<?> createValue(final List<?> params) {
return compositeKey;
}
+ private static boolean addKey(OCompositeKey firstKey, List<OCompositeKey> compositeKeys, boolean containsCollection,
+ Object keyValue) {
+ if (keyValue instanceof Collection) {
+ final Collection<?> collectionKey = (Collection<?>) keyValue;
+ if (!containsCollection)
+ for (int i = 1; i < collectionKey.size(); i++) {
+ final OCompositeKey compositeKey = new OCompositeKey(firstKey.getKeys());
+ compositeKeys.add(compositeKey);
+ }
+ else
+ throw new OIndexException("Composite key can not contain more than one collection item");
+
+ int compositeIndex = 0;
+ for (final Object keyItem : collectionKey) {
+ final OCompositeKey compositeKey = compositeKeys.get(compositeIndex);
+ compositeKey.addKey(keyItem);
+
+ compositeIndex++;
+ }
+
+ containsCollection = true;
+ } else if (containsCollection)
+ for (final OCompositeKey compositeKey : compositeKeys)
+ compositeKey.addKey(keyValue);
+ else
+ firstKey.addKey(keyValue);
+
+ return containsCollection;
+ }
+
/**
* {@inheritDoc}
*/
- public Comparable<?> createValue(final Object... params) {
+ public Object createValue(final Object... params) {
return createValue(Arrays.asList(params));
}
+ public void processChangeEvent(OMultiValueChangeEvent<?, ?> changeEvent, Map<OCompositeKey, Integer> keysToAdd,
+ Map<OCompositeKey, Integer> keysToRemove, Object... params) {
+
+ final OIndexDefinitionMultiValue indexDefinitionMultiValue = (OIndexDefinitionMultiValue) indexDefinitions
+ .get(multiValueDefinitionIndex);
+
+ final CompositeWrapperMap compositeWrapperKeysToAdd = new CompositeWrapperMap(keysToAdd, indexDefinitions, params,
+ multiValueDefinitionIndex);
+
+ final CompositeWrapperMap compositeWrapperKeysToRemove = new CompositeWrapperMap(keysToRemove, indexDefinitions, params,
+ multiValueDefinitionIndex);
+
+ indexDefinitionMultiValue.processChangeEvent(changeEvent, compositeWrapperKeysToAdd, compositeWrapperKeysToRemove);
+ }
+
/**
* {@inheritDoc}
*/
@@ -235,7 +379,7 @@ public String toCreateIndexDDL(final String indexName, final String indexType) {
final StringBuilder ddl = new StringBuilder("create index ");
ddl.append(indexName).append(" on ").append(className).append(" ( ");
- final Iterator<String> fieldIterator = getFields().iterator();
+ final Iterator<String> fieldIterator = getFieldsToIndex().iterator();
if (fieldIterator.hasNext()) {
ddl.append(fieldIterator.next());
while (fieldIterator.hasNext()) {
@@ -244,14 +388,16 @@ public String toCreateIndexDDL(final String indexName, final String indexType) {
}
ddl.append(" ) ").append(indexType).append(' ');
- boolean first = true;
- for (OType oType : getTypes()) {
- if (first)
- first = false;
- else
- ddl.append(", ");
+ if (multiValueDefinitionIndex == -1) {
+ boolean first = true;
+ for (OType oType : getTypes()) {
+ if (first)
+ first = false;
+ else
+ ddl.append(", ");
- ddl.append(oType.name());
+ ddl.append(oType.name());
+ }
}
return ddl.toString();
@@ -277,6 +423,9 @@ protected void fromStream() {
indexDefinition.fromStream(indDoc);
indexDefinitions.add(indexDefinition);
+
+ if (indexDefinition instanceof OIndexDefinitionMultiValue)
+ multiValueDefinitionIndex = indexDefinitions.size() - 1;
}
} catch (final ClassNotFoundException e) {
@@ -291,4 +440,85 @@ protected void fromStream() {
throw new OIndexException("Error during composite index deserialization", e);
}
}
+
+ private static final class CompositeWrapperMap implements Map<Object, Integer> {
+ private final Map<OCompositeKey, Integer> underlying;
+ private final Object[] params;
+ private final List<OIndexDefinition> indexDefinitions;
+ private final int multiValueIndex;
+
+ private CompositeWrapperMap(Map<OCompositeKey, Integer> underlying, List<OIndexDefinition> indexDefinitions, Object[] params,
+ int multiValueIndex) {
+ this.underlying = underlying;
+ this.params = params;
+ this.multiValueIndex = multiValueIndex;
+ this.indexDefinitions = indexDefinitions;
+ }
+
+ public int size() {
+ return underlying.size();
+ }
+
+ public boolean isEmpty() {
+ return underlying.isEmpty();
+ }
+
+ public boolean containsKey(Object key) {
+ final OCompositeKey compositeKey = convertToCompositeKey(key);
+
+ return underlying.containsKey(compositeKey);
+ }
+
+ public boolean containsValue(Object value) {
+ return underlying.containsValue(value);
+ }
+
+ public Integer get(Object key) {
+ return underlying.get(convertToCompositeKey(key));
+ }
+
+ public Integer put(Object key, Integer value) {
+ final OCompositeKey compositeKey = convertToCompositeKey(key);
+ return underlying.put(compositeKey, value);
+ }
+
+ public Integer remove(Object key) {
+ return underlying.remove(convertToCompositeKey(key));
+ }
+
+ public void putAll(Map<? extends Object, ? extends Integer> m) {
+ throw new UnsupportedOperationException("Unsupported because of performance reasons");
+ }
+
+ public void clear() {
+ underlying.clear();
+ }
+
+ public Set<Object> keySet() {
+ throw new UnsupportedOperationException("Unsupported because of performance reasons");
+ }
+
+ public Collection<Integer> values() {
+ return underlying.values();
+ }
+
+ public Set<Entry<Object, Integer>> entrySet() {
+ throw new UnsupportedOperationException();
+ }
+
+ private OCompositeKey convertToCompositeKey(Object key) {
+ final OCompositeKey compositeKey = new OCompositeKey();
+
+ int paramsIndex = 0;
+ for (int i = 0; i < indexDefinitions.size(); i++) {
+ final OIndexDefinition indexDefinition = indexDefinitions.get(i);
+ if (i != multiValueIndex) {
+ compositeKey.addKey(indexDefinition.createValue(params[paramsIndex]));
+ paramsIndex++;
+ } else
+ compositeKey.addKey(((OIndexDefinitionMultiValue) indexDefinition).createSingleValue(key));
+ }
+ return compositeKey;
+ }
+ }
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinition.java b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinition.java
index cfdfcbf1c03..79f183b550b 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinition.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinition.java
@@ -30,86 +30,92 @@
* @author Andrey Lomakin, Artem Orobets
*/
public interface OIndexDefinition extends OIndexCallback {
- /**
- * @return Names of fields which given index is used to calculate key value. Order of fields is important.
- */
- public List<String> getFields();
+ /**
+ * @return Names of fields which given index is used to calculate key value. Order of fields is important.
+ */
+ public List<String> getFields();
- /**
- * @return Name of the class which this index belongs to.
- */
- public String getClassName();
+ /**
+ * @return Names of fields and their index modifiers (like "by value" for fields that hold <code>Map</code> values) which given
+ * index is used to calculate key value. Order of fields is important.
+ */
+ public List<String> getFieldsToIndex();
- /**
- * {@inheritDoc}
- */
- public boolean equals(Object index);
+ /**
+ * @return Name of the class which this index belongs to.
+ */
+ public String getClassName();
- /**
- * {@inheritDoc}
- */
- public int hashCode();
+ /**
+ * {@inheritDoc}
+ */
+ public boolean equals(Object index);
- /**
- * {@inheritDoc}
- */
- public String toString();
+ /**
+ * {@inheritDoc}
+ */
+ public int hashCode();
- /**
- * Calculates key value by passed in parameters.
- *
- * If it is impossible to calculate key value by given parameters <code>null</code> will be returned.
- *
- * @param params
- * Parameters from which index key will be calculated.
- *
- * @return Key value or null if calculation is impossible.
- */
- public Object createValue(List<?> params);
+ /**
+ * {@inheritDoc}
+ */
+ public String toString();
- /**
- * Calculates key value by passed in parameters.
- *
- * If it is impossible to calculate key value by given parameters <code>null</code> will be returned.
- *
- *
- * @param params
- * Parameters from which index key will be calculated.
- *
- * @return Key value or null if calculation is impossible.
- */
- public Object createValue(Object... params);
+ /**
+ * Calculates key value by passed in parameters.
+ *
+ * If it is impossible to calculate key value by given parameters <code>null</code> will be returned.
+ *
+ * @param params
+ * Parameters from which index key will be calculated.
+ *
+ * @return Key value or null if calculation is impossible.
+ */
+ public Object createValue(List<?> params);
- /**
- * Returns amount of parameters that are used to calculate key value. It does not mean that all parameters should be supplied. It
- * only means that if you provide more parameters they will be ignored and will not participate in index key calculation.
- *
- * @return Amount of that are used to calculate key value. Call result should be equals to {@code getTypes().length}.
- */
- public int getParamCount();
+ /**
+ * Calculates key value by passed in parameters.
+ *
+ * If it is impossible to calculate key value by given parameters <code>null</code> will be returned.
+ *
+ *
+ * @param params
+ * Parameters from which index key will be calculated.
+ *
+ * @return Key value or null if calculation is impossible.
+ */
+ public Object createValue(Object... params);
- /**
- * Return types of values from which index key consist. In case of index that is built on single document property value single
- * array that contains property type will be returned. In case of composite indexes result will contain several key types.
- *
- * @return Types of values from which index key consist.
- */
- public OType[] getTypes();
+ /**
+ * Returns amount of parameters that are used to calculate key value. It does not mean that all parameters should be supplied. It
+ * only means that if you provide more parameters they will be ignored and will not participate in index key calculation.
+ *
+ * @return Amount of that are used to calculate key value. Call result should be equals to {@code getTypes().length}.
+ */
+ public int getParamCount();
- /**
- * Serializes internal index state to document.
- *
- * @return Document that contains internal index state.
- */
- public ODocument toStream();
+ /**
+ * Return types of values from which index key consist. In case of index that is built on single document property value single
+ * array that contains property type will be returned. In case of composite indexes result will contain several key types.
+ *
+ * @return Types of values from which index key consist.
+ */
+ public OType[] getTypes();
- /**
- * Deserialize internal index state from document.
- *
- * @param document
- * Serialized index presentation.
- */
- public void fromStream(ODocument document);
+ /**
+ * Serializes internal index state to document.
+ *
+ * @return Document that contains internal index state.
+ */
+ public ODocument toStream();
- public String toCreateIndexDDL(String indexName, String indexType);
+ /**
+ * Deserialize internal index state from document.
+ *
+ * @param document
+ * Serialized index presentation.
+ */
+ public void fromStream(ODocument document);
+
+ public String toCreateIndexDDL(String indexName, String indexType);
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinitionFactory.java b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinitionFactory.java
index becbcbe6e29..854b9bb083e 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinitionFactory.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinitionFactory.java
@@ -1,6 +1,7 @@
package com.orientechnologies.orient.core.index;
import java.util.List;
+import java.util.regex.Pattern;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OProperty;
@@ -14,6 +15,8 @@
* @author Artem Orobets
*/
public class OIndexDefinitionFactory {
+ private static final Pattern FILED_NAME_PATTERN = Pattern.compile("\\s+");
+
/**
* Creates an instance of {@link OIndexDefinition} for automatic index.
*
@@ -43,35 +46,26 @@ public static OIndexDefinition createIndexDefinition(final OClass oClass, final
* @return extracted property name
*/
public static String extractFieldName(final String fieldDefinition) {
- String[] fieldNameParts = fieldDefinition.split("\\s+");
+ String[] fieldNameParts = FILED_NAME_PATTERN.split(fieldDefinition);
if (fieldNameParts.length == 1)
return fieldDefinition;
if (fieldNameParts.length == 3 && "by".equalsIgnoreCase(fieldNameParts[1]))
return fieldNameParts[0];
throw new IllegalArgumentException("Illegal field name format, should be '<property> [by key|value]' but was '"
- + fieldDefinition + "'");
+ + fieldDefinition + '\'');
}
private static OIndexDefinition createMultipleFieldIndexDefinition(final OClass oClass, final List<String> fieldsToIndex,
final List<OType> types) {
- final OIndexDefinition indexDefinition;
final String className = oClass.getName();
final OCompositeIndexDefinition compositeIndex = new OCompositeIndexDefinition(className);
for (int i = 0, fieldsToIndexSize = fieldsToIndex.size(); i < fieldsToIndexSize; i++) {
- String fieldName = adjustFieldName(oClass, fieldsToIndex.get(i));
- final OType propertyType = types.get(i);
- if (propertyType.equals(OType.EMBEDDEDLIST) || propertyType.equals(OType.EMBEDDEDSET) || propertyType.equals(OType.LINKSET)
- || propertyType.equals(OType.LINKLIST) || propertyType.equals(OType.EMBEDDEDMAP) || propertyType.equals(OType.LINKMAP))
- throw new OIndexException("Collections are not supported in composite indexes");
-
- final OPropertyIndexDefinition propertyIndex = new OPropertyIndexDefinition(className, fieldName, propertyType);
- compositeIndex.addIndex(propertyIndex);
+ compositeIndex.addIndex(createSingleFieldIndexDefinition(oClass, fieldsToIndex.get(i), types.get(i)));
}
- indexDefinition = compositeIndex;
- return indexDefinition;
+ return compositeIndex;
}
private static void checkTypes(OClass oClass, List<String> fieldNames, List<OType> types) {
@@ -135,7 +129,7 @@ else if (type.equals(OType.LINKLIST)) {
}
private static OPropertyMapIndexDefinition.INDEX_BY extractMapIndexSpecifier(final String fieldName) {
- String[] fieldNameParts = fieldName.split("\\s+");
+ String[] fieldNameParts = FILED_NAME_PATTERN.split(fieldName);
if (fieldNameParts.length == 1)
return OPropertyMapIndexDefinition.INDEX_BY.KEY;
@@ -145,12 +139,12 @@ private static OPropertyMapIndexDefinition.INDEX_BY extractMapIndexSpecifier(fin
return OPropertyMapIndexDefinition.INDEX_BY.valueOf(fieldNameParts[2].toUpperCase());
} catch (IllegalArgumentException iae) {
throw new IllegalArgumentException("Illegal field name format, should be '<property> [by key|value]' but was '"
- + fieldName + "'");
+ + fieldName + '\'');
}
}
throw new IllegalArgumentException("Illegal field name format, should be '<property> [by key|value]' but was '" + fieldName
- + "'");
+ + '\'');
}
private static String adjustFieldName(final OClass clazz, final String fieldName) {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinitionMultiValue.java b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinitionMultiValue.java
index 6e93bc81818..b54c650516b 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinitionMultiValue.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexDefinitionMultiValue.java
@@ -19,30 +19,34 @@
import com.orientechnologies.orient.core.db.record.OMultiValueChangeEvent;
- /**
- * Interface that indicates that index definition is based on collection of values but not on single value.
- *
- * @author <a href="mailto:[email protected]">Andrey Lomakin</a>
- * @since 20.12.11
- */
- public interface OIndexDefinitionMultiValue extends OIndexDefinition {
+/**
+ * Interface that indicates that index definition is based on collection of values but not on single value.
+ *
+ * @author <a href="mailto:[email protected]">Andrey Lomakin</a>
+ * @since 20.12.11
+ */
+public interface OIndexDefinitionMultiValue extends OIndexDefinition {
- /**
- * Converts passed in value in the key of single index entry.
- *
- * @param param Value to convert.
- * @return Index key.
- */
- public Object createSingleValue(final Object param);
+ /**
+ * Converts passed in value in the key of single index entry.
+ *
+ * @param param
+ * Value to convert.
+ * @return Index key.
+ */
+ public Object createSingleValue(final Object... param);
- /**
- * Process event that contains operation on collection and extract values that should be added removed from index
- * to reflect collection changes in the given index.
- *
- * @param changeEvent Event that describes operation that was performed on collection.
- * @param keysToAdd Values that should be added to related index.
- * @param keysToRemove Values that should be removed to related index.
- */
- public void processChangeEvent(final OMultiValueChangeEvent<?,?> changeEvent, final Map<Object, Integer> keysToAdd,
- final Map<Object, Integer> keysToRemove);
+ /**
+ * Process event that contains operation on collection and extract values that should be added removed from index to reflect
+ * collection changes in the given index.
+ *
+ * @param changeEvent
+ * Event that describes operation that was performed on collection.
+ * @param keysToAdd
+ * Values that should be added to related index.
+ * @param keysToRemove
+ * Values that should be removed to related index.
+ */
+ public void processChangeEvent(final OMultiValueChangeEvent<?, ?> changeEvent, final Map<Object, Integer> keysToAdd,
+ final Map<Object, Integer> keysToRemove);
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexDefinition.java b/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexDefinition.java
index 2ceba637455..be35084b3bb 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexDefinition.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexDefinition.java
@@ -55,6 +55,10 @@ public List<String> getFields() {
return Collections.singletonList(field);
}
+ public List<String> getFieldsToIndex() {
+ return Collections.singletonList(field);
+ }
+
public Object getDocumentValueToIndex(final ODocument iDocument) {
if (OType.LINK.equals(keyType)) {
final OIdentifiable identifiable = iDocument.field(field, OType.LINK);
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyListIndexDefinition.java b/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyListIndexDefinition.java
index 83d4d5a9558..23cdf4db719 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyListIndexDefinition.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyListIndexDefinition.java
@@ -33,69 +33,69 @@
*/
public class OPropertyListIndexDefinition extends OAbstractIndexDefinitionMultiValue implements OIndexDefinitionMultiValue {
- public OPropertyListIndexDefinition(final String iClassName, final String iField, final OType iType) {
- super(iClassName, iField, iType);
- }
+ public OPropertyListIndexDefinition(final String iClassName, final String iField, final OType iType) {
+ super(iClassName, iField, iType);
+ }
- public OPropertyListIndexDefinition() {
- }
+ public OPropertyListIndexDefinition() {
+ }
- @Override
- public Object getDocumentValueToIndex(ODocument iDocument) {
- return createValue(iDocument.field(field));
- }
+ @Override
+ public Object getDocumentValueToIndex(ODocument iDocument) {
+ return createValue(iDocument.field(field));
+ }
- @Override
- public Object createValue(final List<?> params) {
- if (!(params.get(0) instanceof Collection))
- return null;
+ @Override
+ public Object createValue(final List<?> params) {
+ if (!(params.get(0) instanceof Collection))
+ return null;
- final Collection<?> multiValueCollection = (Collection<?>) params.get(0);
- final List<Object> values = new ArrayList<Object>(multiValueCollection.size());
- for (final Object item : multiValueCollection) {
- values.add(createSingleValue(item));
- }
- return values;
- }
+ final Collection<?> multiValueCollection = (Collection<?>) params.get(0);
+ final List<Object> values = new ArrayList<Object>(multiValueCollection.size());
+ for (final Object item : multiValueCollection) {
+ values.add(createSingleValue(item));
+ }
+ return values;
+ }
- @Override
- public Object createValue(final Object... params) {
- if (!(params[0] instanceof Collection)) {
- return null;
- }
+ @Override
+ public Object createValue(final Object... params) {
+ if (!(params[0] instanceof Collection)) {
+ return null;
+ }
- final Collection<?> multiValueCollection = (Collection<?>) params[0];
- final List<Object> values = new ArrayList<Object>(multiValueCollection.size());
- for (final Object item : multiValueCollection) {
- values.add(createSingleValue(item));
- }
- return values;
- }
+ final Collection<?> multiValueCollection = (Collection<?>) params[0];
+ final List<Object> values = new ArrayList<Object>(multiValueCollection.size());
+ for (final Object item : multiValueCollection) {
+ values.add(createSingleValue(item));
+ }
+ return values;
+ }
- public Object createSingleValue(final Object param) {
- return OType.convert(param, keyType.getDefaultJavaType());
- }
+ public Object createSingleValue(final Object... param) {
+ return OType.convert(param[0], keyType.getDefaultJavaType());
+ }
- public void processChangeEvent(final OMultiValueChangeEvent<?, ?> changeEvent, final Map<Object, Integer> keysToAdd,
- final Map<Object, Integer> keysToRemove) {
- switch (changeEvent.getChangeType()) {
- case ADD: {
- processAdd(createSingleValue(changeEvent.getValue()), keysToAdd, keysToRemove);
- break;
- }
- case REMOVE: {
- processRemoval(createSingleValue(changeEvent.getOldValue()), keysToAdd, keysToRemove);
- break;
- }
- case UPDATE: {
- processRemoval(createSingleValue(changeEvent.getOldValue()), keysToAdd, keysToRemove);
- processAdd(createSingleValue(changeEvent.getValue()), keysToAdd, keysToRemove);
- break;
- }
- default:
- throw new IllegalArgumentException("Invalid change type : " + changeEvent.getChangeType());
- }
- }
+ public void processChangeEvent(final OMultiValueChangeEvent<?, ?> changeEvent, final Map<Object, Integer> keysToAdd,
+ final Map<Object, Integer> keysToRemove) {
+ switch (changeEvent.getChangeType()) {
+ case ADD: {
+ processAdd(createSingleValue(changeEvent.getValue()), keysToAdd, keysToRemove);
+ break;
+ }
+ case REMOVE: {
+ processRemoval(createSingleValue(changeEvent.getOldValue()), keysToAdd, keysToRemove);
+ break;
+ }
+ case UPDATE: {
+ processRemoval(createSingleValue(changeEvent.getOldValue()), keysToAdd, keysToRemove);
+ processAdd(createSingleValue(changeEvent.getValue()), keysToAdd, keysToRemove);
+ break;
+ }
+ default:
+ throw new IllegalArgumentException("Invalid change type : " + changeEvent.getChangeType());
+ }
+ }
@Override
public String toCreateIndexDDL(String indexName, String indexType) {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyMapIndexDefinition.java b/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyMapIndexDefinition.java
index 023b0fe2b5e..976c87a21e9 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyMapIndexDefinition.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyMapIndexDefinition.java
@@ -17,6 +17,7 @@
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -32,176 +33,183 @@
*/
public class OPropertyMapIndexDefinition extends OAbstractIndexDefinitionMultiValue implements OIndexDefinitionMultiValue {
- /**
- * Indicates whether Map will be indexed using its keys or values.
- */
- public static enum INDEX_BY {
- KEY, VALUE
- }
-
- private INDEX_BY indexBy = INDEX_BY.KEY;
-
- public OPropertyMapIndexDefinition() {
- }
-
- public OPropertyMapIndexDefinition(final String iClassName, final String iField, final OType iType, final INDEX_BY indexBy) {
- super(iClassName, iField, iType);
-
- if (indexBy == null)
- throw new NullPointerException("You have to provide way by which map entries should be mapped");
-
- this.indexBy = indexBy;
- }
-
- @Override
- public Object getDocumentValueToIndex(ODocument iDocument) {
- return createValue(iDocument.field(field));
- }
-
- @Override
- public Object createValue(List<?> params) {
- if (!(params.get(0) instanceof Map))
- return null;
-
- final Collection<?> mapParams = extractMapParams((Map<?, ?>) params.get(0));
- final List<Object> result = new ArrayList<Object>(mapParams.size());
- for (final Object mapParam : mapParams) {
- result.add(createSingleValue(mapParam));
- }
-
- return result;
- }
-
- @Override
- public Object createValue(Object... params) {
- if (!(params[0] instanceof Map))
- return null;
-
- final Collection<?> mapParams = extractMapParams((Map<?, ?>) params[0]);
-
- final List<Object> result = new ArrayList<Object>(mapParams.size());
- for (final Object mapParam : mapParams) {
- result.add(createSingleValue(mapParam));
- }
-
- return result;
- }
-
- public INDEX_BY getIndexBy() {
- return indexBy;
- }
-
- @Override
- protected void serializeToStream() {
- super.serializeToStream();
- document.field("mapIndexBy", indexBy.toString());
- }
-
- @Override
- protected void serializeFromStream() {
- super.serializeFromStream();
- indexBy = INDEX_BY.valueOf(document.<String> field("mapIndexBy"));
- }
-
- private Collection<?> extractMapParams(Map<?, ?> map) {
- if (indexBy == INDEX_BY.KEY)
- return map.keySet();
-
- return map.values();
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o)
- return true;
- if (o == null || getClass() != o.getClass())
- return false;
- if (!super.equals(o))
- return false;
-
- OPropertyMapIndexDefinition that = (OPropertyMapIndexDefinition) o;
-
- if (indexBy != that.indexBy)
- return false;
-
- return true;
- }
-
- public Object createSingleValue(final Object param) {
- return OType.convert(param, keyType.getDefaultJavaType());
- }
-
- public void processChangeEvent(final OMultiValueChangeEvent<?, ?> changeEvent, final Map<Object, Integer> keysToAdd,
- final Map<Object, Integer> keysToRemove) {
- final boolean result;
- if (indexBy.equals(INDEX_BY.KEY))
- result = processKeyChangeEvent(changeEvent, keysToAdd, keysToRemove);
- else
- result = processValueChangeEvent(changeEvent, keysToAdd, keysToRemove);
-
- if (!result)
- throw new IllegalArgumentException("Invalid change type :" + changeEvent.getChangeType());
- }
-
- private boolean processKeyChangeEvent(final OMultiValueChangeEvent<?, ?> changeEvent, final Map<Object, Integer> keysToAdd,
- final Map<Object, Integer> keysToRemove) {
- switch (changeEvent.getChangeType()) {
- case ADD:
- processAdd(createSingleValue(changeEvent.getKey()), keysToAdd, keysToRemove);
- return true;
- case REMOVE:
- processRemoval(createSingleValue(changeEvent.getKey()), keysToAdd, keysToRemove);
- return true;
- case UPDATE:
- return true;
- }
- return false;
- }
-
- private boolean processValueChangeEvent(final OMultiValueChangeEvent<?, ?> changeEvent, final Map<Object, Integer> keysToAdd,
- final Map<Object, Integer> keysToRemove) {
- switch (changeEvent.getChangeType()) {
- case ADD:
- processAdd(createSingleValue(changeEvent.getValue()), keysToAdd, keysToRemove);
- return true;
- case REMOVE:
- processRemoval(createSingleValue(changeEvent.getOldValue()), keysToAdd, keysToRemove);
- return true;
- case UPDATE:
- processRemoval(createSingleValue(changeEvent.getOldValue()), keysToAdd, keysToRemove);
- processAdd(createSingleValue(changeEvent.getValue()), keysToAdd, keysToRemove);
- return true;
- }
- return false;
- }
-
- @Override
- public int hashCode() {
- int result = super.hashCode();
- result = 31 * result + indexBy.hashCode();
- return result;
- }
-
- @Override
- public String toString() {
- return "OPropertyMapIndexDefinition{" + "indexBy=" + indexBy + "} " + super.toString();
- }
-
- @Override
- public String toCreateIndexDDL(String indexName, String indexType) {
- final StringBuilder ddl = new StringBuilder("create index ");
-
- ddl.append(indexName).append(" on ");
- ddl.append(className).append(" ( ").append(field);
-
- if (indexBy == INDEX_BY.KEY)
- ddl.append(" by key");
- else
- ddl.append(" by value");
-
- ddl.append(" ) ");
- ddl.append(indexType);
-
- return ddl.toString();
- }
+ /**
+ * Indicates whether Map will be indexed using its keys or values.
+ */
+ public static enum INDEX_BY {
+ KEY, VALUE
+ }
+
+ private INDEX_BY indexBy = INDEX_BY.KEY;
+
+ public OPropertyMapIndexDefinition() {
+ }
+
+ public OPropertyMapIndexDefinition(final String iClassName, final String iField, final OType iType, final INDEX_BY indexBy) {
+ super(iClassName, iField, iType);
+
+ if (indexBy == null)
+ throw new NullPointerException("You have to provide way by which map entries should be mapped");
+
+ this.indexBy = indexBy;
+ }
+
+ @Override
+ public Object getDocumentValueToIndex(ODocument iDocument) {
+ return createValue(iDocument.field(field));
+ }
+
+ @Override
+ public Object createValue(List<?> params) {
+ if (!(params.get(0) instanceof Map))
+ return null;
+
+ final Collection<?> mapParams = extractMapParams((Map<?, ?>) params.get(0));
+ final List<Object> result = new ArrayList<Object>(mapParams.size());
+ for (final Object mapParam : mapParams) {
+ result.add(createSingleValue(mapParam));
+ }
+
+ return result;
+ }
+
+ @Override
+ public Object createValue(Object... params) {
+ if (!(params[0] instanceof Map))
+ return null;
+
+ final Collection<?> mapParams = extractMapParams((Map<?, ?>) params[0]);
+
+ final List<Object> result = new ArrayList<Object>(mapParams.size());
+ for (final Object mapParam : mapParams) {
+ result.add(createSingleValue(mapParam));
+ }
+
+ return result;
+ }
+
+ public INDEX_BY getIndexBy() {
+ return indexBy;
+ }
+
+ @Override
+ protected void serializeToStream() {
+ super.serializeToStream();
+ document.field("mapIndexBy", indexBy.toString());
+ }
+
+ @Override
+ protected void serializeFromStream() {
+ super.serializeFromStream();
+ indexBy = INDEX_BY.valueOf(document.<String> field("mapIndexBy"));
+ }
+
+ private Collection<?> extractMapParams(Map<?, ?> map) {
+ if (indexBy == INDEX_BY.KEY)
+ return map.keySet();
+
+ return map.values();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+ if (!super.equals(o))
+ return false;
+
+ OPropertyMapIndexDefinition that = (OPropertyMapIndexDefinition) o;
+
+ if (indexBy != that.indexBy)
+ return false;
+
+ return true;
+ }
+
+ public Object createSingleValue(final Object... param) {
+ return OType.convert(param[0], keyType.getDefaultJavaType());
+ }
+
+ public void processChangeEvent(final OMultiValueChangeEvent<?, ?> changeEvent, final Map<Object, Integer> keysToAdd,
+ final Map<Object, Integer> keysToRemove) {
+ final boolean result;
+ if (indexBy.equals(INDEX_BY.KEY))
+ result = processKeyChangeEvent(changeEvent, keysToAdd, keysToRemove);
+ else
+ result = processValueChangeEvent(changeEvent, keysToAdd, keysToRemove);
+
+ if (!result)
+ throw new IllegalArgumentException("Invalid change type :" + changeEvent.getChangeType());
+ }
+
+ private boolean processKeyChangeEvent(final OMultiValueChangeEvent<?, ?> changeEvent, final Map<Object, Integer> keysToAdd,
+ final Map<Object, Integer> keysToRemove) {
+ switch (changeEvent.getChangeType()) {
+ case ADD:
+ processAdd(createSingleValue(changeEvent.getKey()), keysToAdd, keysToRemove);
+ return true;
+ case REMOVE:
+ processRemoval(createSingleValue(changeEvent.getKey()), keysToAdd, keysToRemove);
+ return true;
+ case UPDATE:
+ return true;
+ }
+ return false;
+ }
+
+ private boolean processValueChangeEvent(final OMultiValueChangeEvent<?, ?> changeEvent, final Map<Object, Integer> keysToAdd,
+ final Map<Object, Integer> keysToRemove) {
+ switch (changeEvent.getChangeType()) {
+ case ADD:
+ processAdd(createSingleValue(changeEvent.getValue()), keysToAdd, keysToRemove);
+ return true;
+ case REMOVE:
+ processRemoval(createSingleValue(changeEvent.getOldValue()), keysToAdd, keysToRemove);
+ return true;
+ case UPDATE:
+ processRemoval(createSingleValue(changeEvent.getOldValue()), keysToAdd, keysToRemove);
+ processAdd(createSingleValue(changeEvent.getValue()), keysToAdd, keysToRemove);
+ return true;
+ }
+ return false;
+ }
+
+ @Override
+ public List<String> getFieldsToIndex() {
+ if (indexBy == INDEX_BY.KEY)
+ return Collections.singletonList(field + " by key");
+ return Collections.singletonList(field + " by value");
+ }
+
+ @Override
+ public int hashCode() {
+ int result = super.hashCode();
+ result = 31 * result + indexBy.hashCode();
+ return result;
+ }
+
+ @Override
+ public String toString() {
+ return "OPropertyMapIndexDefinition{" + "indexBy=" + indexBy + "} " + super.toString();
+ }
+
+ @Override
+ public String toCreateIndexDDL(String indexName, String indexType) {
+ final StringBuilder ddl = new StringBuilder("create index ");
+
+ ddl.append(indexName).append(" on ");
+ ddl.append(className).append(" ( ").append(field);
+
+ if (indexBy == INDEX_BY.KEY)
+ ddl.append(" by key");
+ else
+ ddl.append(" by value");
+
+ ddl.append(" ) ");
+ ddl.append(indexType);
+
+ return ddl.toString();
+ }
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/ORuntimeKeyIndexDefinition.java b/core/src/main/java/com/orientechnologies/orient/core/index/ORuntimeKeyIndexDefinition.java
index 270af80d814..ae6509a47f4 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/ORuntimeKeyIndexDefinition.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/ORuntimeKeyIndexDefinition.java
@@ -53,6 +53,10 @@ public List<String> getFields() {
return Collections.emptyList();
}
+ public List<String> getFieldsToIndex() {
+ return Collections.emptyList();
+ }
+
public String getClassName() {
return null;
}
@@ -129,7 +133,7 @@ public String toString() {
*/
public String toCreateIndexDDL(final String indexName, final String indexType) {
final StringBuilder ddl = new StringBuilder("create index ");
- ddl.append(indexName).append(" ").append(indexType).append(" ");
+ ddl.append(indexName).append(' ').append(indexType).append(' ');
ddl.append("runtime ").append(serializer.getId());
return ddl.toString();
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OSimpleKeyIndexDefinition.java b/core/src/main/java/com/orientechnologies/orient/core/index/OSimpleKeyIndexDefinition.java
index 3f17667999f..d4452d6edb7 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OSimpleKeyIndexDefinition.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OSimpleKeyIndexDefinition.java
@@ -12,131 +12,135 @@
import com.orientechnologies.orient.core.type.ODocumentWrapperNoClass;
public class OSimpleKeyIndexDefinition extends ODocumentWrapperNoClass implements OIndexDefinition {
- private OType[] keyTypes;
-
- public OSimpleKeyIndexDefinition(final OType... keyTypes) {
- super(new ODocument());
- this.keyTypes = keyTypes;
- }
-
- public OSimpleKeyIndexDefinition() {
- }
-
- public List<String> getFields() {
- return Collections.emptyList();
- }
-
- public String getClassName() {
- return null;
- }
-
- public Comparable<?> createValue(final List<?> params) {
- return createValue(params != null ? params.toArray() : null);
- }
-
- public Comparable<?> createValue(final Object... params) {
- if (params == null || params.length == 0)
- return null;
-
- if (keyTypes.length == 1)
- return (Comparable<?>) OType.convert(params[0], keyTypes[0].getDefaultJavaType());
-
- final OCompositeKey compositeKey = new OCompositeKey();
-
- for (int i = 0; i < params.length; ++i) {
- final Comparable<?> paramValue = (Comparable<?>) OType.convert(params[i], keyTypes[i].getDefaultJavaType());
-
- if (paramValue == null)
- return null;
- compositeKey.addKey(paramValue);
- }
-
- return compositeKey;
- }
-
- public int getParamCount() {
- return keyTypes.length;
- }
-
- public OType[] getTypes() {
- return keyTypes;
- }
-
- @Override
- public ODocument toStream() {
- document.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING);
- try {
-
- final List<String> keyTypeNames = new ArrayList<String>(keyTypes.length);
-
- for (final OType keyType : keyTypes)
- keyTypeNames.add(keyType.toString());
-
- document.field("keyTypes", keyTypeNames, OType.EMBEDDEDLIST);
- return document;
- } finally {
- document.setInternalStatus(ORecordElement.STATUS.LOADED);
- }
- }
-
- @Override
- protected void fromStream() {
- final List<String> keyTypeNames = document.field("keyTypes");
- keyTypes = new OType[keyTypeNames.size()];
-
- int i = 0;
- for (final String keyTypeName : keyTypeNames) {
- keyTypes[i] = OType.valueOf(keyTypeName);
- i++;
- }
- }
-
- public Object getDocumentValueToIndex(final ODocument iDocument) {
- throw new OIndexException("This method is not supported in given index definition.");
- }
-
- @Override
- public boolean equals(final Object o) {
- if (this == o)
- return true;
- if (o == null || getClass() != o.getClass())
- return false;
-
- final OSimpleKeyIndexDefinition that = (OSimpleKeyIndexDefinition) o;
- if (!Arrays.equals(keyTypes, that.keyTypes))
- return false;
-
- return true;
- }
-
- @Override
- public int hashCode() {
- int result = super.hashCode();
- result = 31 * result + (keyTypes != null ? Arrays.hashCode(keyTypes) : 0);
- return result;
- }
-
- @Override
- public String toString() {
- return "OSimpleKeyIndexDefinition{" + "keyTypes=" + (keyTypes == null ? null : Arrays.asList(keyTypes)) + '}';
- }
-
- /**
- * {@inheritDoc}
- *
- * @param indexName
- * @param indexType
- */
- public String toCreateIndexDDL(final String indexName, final String indexType) {
- final StringBuilder ddl = new StringBuilder("create index ");
- ddl.append(indexName).append(" ").append(indexType).append(" ");
-
- if (keyTypes != null && keyTypes.length > 0) {
- ddl.append(keyTypes[0].toString());
- for (int i = 1; i < keyTypes.length; i++) {
- ddl.append(", ").append(keyTypes[i].toString());
- }
- }
- return ddl.toString();
- }
+ private OType[] keyTypes;
+
+ public OSimpleKeyIndexDefinition(final OType... keyTypes) {
+ super(new ODocument());
+ this.keyTypes = keyTypes;
+ }
+
+ public OSimpleKeyIndexDefinition() {
+ }
+
+ public List<String> getFields() {
+ return Collections.emptyList();
+ }
+
+ public List<String> getFieldsToIndex() {
+ return Collections.emptyList();
+ }
+
+ public String getClassName() {
+ return null;
+ }
+
+ public Comparable<?> createValue(final List<?> params) {
+ return createValue(params != null ? params.toArray() : null);
+ }
+
+ public Comparable<?> createValue(final Object... params) {
+ if (params == null || params.length == 0)
+ return null;
+
+ if (keyTypes.length == 1)
+ return (Comparable<?>) OType.convert(params[0], keyTypes[0].getDefaultJavaType());
+
+ final OCompositeKey compositeKey = new OCompositeKey();
+
+ for (int i = 0; i < params.length; ++i) {
+ final Comparable<?> paramValue = (Comparable<?>) OType.convert(params[i], keyTypes[i].getDefaultJavaType());
+
+ if (paramValue == null)
+ return null;
+ compositeKey.addKey(paramValue);
+ }
+
+ return compositeKey;
+ }
+
+ public int getParamCount() {
+ return keyTypes.length;
+ }
+
+ public OType[] getTypes() {
+ return keyTypes;
+ }
+
+ @Override
+ public ODocument toStream() {
+ document.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING);
+ try {
+
+ final List<String> keyTypeNames = new ArrayList<String>(keyTypes.length);
+
+ for (final OType keyType : keyTypes)
+ keyTypeNames.add(keyType.toString());
+
+ document.field("keyTypes", keyTypeNames, OType.EMBEDDEDLIST);
+ return document;
+ } finally {
+ document.setInternalStatus(ORecordElement.STATUS.LOADED);
+ }
+ }
+
+ @Override
+ protected void fromStream() {
+ final List<String> keyTypeNames = document.field("keyTypes");
+ keyTypes = new OType[keyTypeNames.size()];
+
+ int i = 0;
+ for (final String keyTypeName : keyTypeNames) {
+ keyTypes[i] = OType.valueOf(keyTypeName);
+ i++;
+ }
+ }
+
+ public Object getDocumentValueToIndex(final ODocument iDocument) {
+ throw new OIndexException("This method is not supported in given index definition.");
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ if (this == o)
+ return true;
+ if (o == null || getClass() != o.getClass())
+ return false;
+
+ final OSimpleKeyIndexDefinition that = (OSimpleKeyIndexDefinition) o;
+ if (!Arrays.equals(keyTypes, that.keyTypes))
+ return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = super.hashCode();
+ result = 31 * result + (keyTypes != null ? Arrays.hashCode(keyTypes) : 0);
+ return result;
+ }
+
+ @Override
+ public String toString() {
+ return "OSimpleKeyIndexDefinition{" + "keyTypes=" + (keyTypes == null ? null : Arrays.asList(keyTypes)) + '}';
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @param indexName
+ * @param indexType
+ */
+ public String toCreateIndexDDL(final String indexName, final String indexType) {
+ final StringBuilder ddl = new StringBuilder("create index ");
+ ddl.append(indexName).append(' ').append(indexType).append(' ');
+
+ if (keyTypes != null && keyTypes.length > 0) {
+ ddl.append(keyTypes[0].toString());
+ for (int i = 1; i < keyTypes.length; i++) {
+ ddl.append(", ").append(keyTypes[i].toString());
+ }
+ }
+ return ddl.toString();
+ }
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java
index 5b0b4b95d08..3377f29751b 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLSelect.java
@@ -60,7 +60,6 @@
import com.orientechnologies.orient.core.sql.operator.OIndexReuseType;
import com.orientechnologies.orient.core.sql.operator.OQueryOperator;
import com.orientechnologies.orient.core.sql.operator.OQueryOperatorBetween;
-import com.orientechnologies.orient.core.sql.operator.OQueryOperatorEquals;
import com.orientechnologies.orient.core.sql.operator.OQueryOperatorIn;
import com.orientechnologies.orient.core.sql.operator.OQueryOperatorMajor;
import com.orientechnologies.orient.core.sql.operator.OQueryOperatorMajorEquals;
@@ -382,11 +381,7 @@ public int compare(final OIndexSearchResult searchResultOne, final OIndexSearchR
final int searchResultFieldsCount = searchResult.fields().size();
final List<OIndex<?>> involvedIndexes = getInvolvedIndexes(iSchemaClass, searchResult);
- Collections.sort(involvedIndexes, new Comparator<OIndex>() {
- public int compare(final OIndex indexOne, final OIndex indexTwo) {
- return indexOne.getDefinition().getParamCount() - indexTwo.getDefinition().getParamCount();
- }
- });
+ Collections.sort(involvedIndexes, IndexComparator.INSTANCE);
// go through all possible index for given set of fields.
for (final OIndex index : involvedIndexes) {
@@ -395,7 +390,7 @@ public int compare(final OIndex indexOne, final OIndex indexTwo) {
// we need to test that last field in query subset and field in index that has the same position
// are equals.
- if (!(operator instanceof OQueryOperatorEquals)) {
+ if (!OIndexSearchResult.isIndexEqualityOperator(operator)) {
final String lastFiled = searchResult.lastField.getItemName(searchResult.lastField.getItemCount() - 1);
final String relatedIndexField = indexDefinition.getFields().get(searchResult.fieldValuePairs.size());
if (!lastFiled.equals(relatedIndexField))
@@ -423,7 +418,7 @@ public int compare(final OIndex indexOne, final OIndex indexTwo) {
return false;
}
- private List<OIndex<?>> getInvolvedIndexes(OClass iSchemaClass, OIndexSearchResult searchResultFields) {
+ private static List<OIndex<?>> getInvolvedIndexes(OClass iSchemaClass, OIndexSearchResult searchResultFields) {
final Set<OIndex<?>> involvedIndexes = iSchemaClass.getInvolvedIndexes(searchResultFields.fields());
final List<OIndex<?>> result = new ArrayList<OIndex<?>>(involvedIndexes.size());
@@ -438,18 +433,21 @@ private List<OIndex<?>> getInvolvedIndexes(OClass iSchemaClass, OIndexSearchResu
return result;
}
- private OIndexSearchResult analyzeQueryBranch(final OClass iSchemaClass, final OSQLFilterCondition iCondition,
+ private static OIndexSearchResult analyzeQueryBranch(final OClass iSchemaClass, OSQLFilterCondition iCondition,
final List<OIndexSearchResult> iIndexSearchResults) {
if (iCondition == null)
return null;
- final OQueryOperator operator = iCondition.getOperator();
- if (operator == null)
+ OQueryOperator operator = iCondition.getOperator();
+
+ while (operator == null) {
if (iCondition.getRight() == null && iCondition.getLeft() instanceof OSQLFilterCondition) {
- return analyzeQueryBranch(iSchemaClass, (OSQLFilterCondition) iCondition.getLeft(), iIndexSearchResults);
+ iCondition = (OSQLFilterCondition) iCondition.getLeft();
+ operator = iCondition.getOperator();
} else {
return null;
}
+ }
final OIndexReuseType indexReuseType = operator.getIndexReuseType(iCondition.getLeft(), iCondition.getRight());
if (indexReuseType.equals(OIndexReuseType.INDEX_INTERSECTION)) {
@@ -494,7 +492,7 @@ private OIndexSearchResult analyzeQueryBranch(final OClass iSchemaClass, final O
* Value to search
* @return true if the property was indexed and found, otherwise false
*/
- private OIndexSearchResult createIndexedProperty(final OSQLFilterCondition iCondition, final Object iItem) {
+ private static OIndexSearchResult createIndexedProperty(final OSQLFilterCondition iCondition, final Object iItem) {
if (iItem == null || !(iItem instanceof OSQLFilterItemField))
return null;
@@ -908,7 +906,7 @@ private boolean isIndexKeySizeQuery() {
return true;
}
- private Object getIndexKey(final OIndexDefinition indexDefinition, Object value) {
+ private static Object getIndexKey(final OIndexDefinition indexDefinition, Object value) {
if (indexDefinition instanceof OCompositeIndexDefinition) {
if (value instanceof List) {
final List<?> values = (List<?>) value;
@@ -939,7 +937,7 @@ protected void parseIndexSearchResult(final Collection<ODocument> entries) {
}
}
- private ODocument createIndexEntryAsDocument(final Object iKey, final OIdentifiable iValue) {
+ private static ODocument createIndexEntryAsDocument(final Object iKey, final OIdentifiable iValue) {
final ODocument doc = new ODocument().setOrdered(true);
doc.field("key", iKey);
doc.field("rid", iValue);
@@ -968,7 +966,7 @@ else if (projection.getValue() instanceof OSQLFunctionRuntime) {
}
}
- private boolean checkIndexExistence(OClass iSchemaClass, OIndexSearchResult result) {
+ private static boolean checkIndexExistence(OClass iSchemaClass, OIndexSearchResult result) {
if (!iSchemaClass.areIndexed(result.fields())) {
return false;
}
@@ -1024,4 +1022,12 @@ protected boolean optimizeExecution() {
return false;
}
+
+ private static class IndexComparator implements Comparator<OIndex> {
+ private static final IndexComparator INSTANCE = new IndexComparator();
+
+ public int compare(final OIndex indexOne, final OIndex indexTwo) {
+ return indexOne.getDefinition().getParamCount() - indexTwo.getDefinition().getParamCount();
+ }
+ }
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OIndexSearchResult.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OIndexSearchResult.java
index e16a0807051..30cbefc7129 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/OIndexSearchResult.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OIndexSearchResult.java
@@ -7,6 +7,9 @@
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemField;
import com.orientechnologies.orient.core.sql.operator.OQueryOperator;
+import com.orientechnologies.orient.core.sql.operator.OQueryOperatorContains;
+import com.orientechnologies.orient.core.sql.operator.OQueryOperatorContainsKey;
+import com.orientechnologies.orient.core.sql.operator.OQueryOperatorContainsValue;
import com.orientechnologies.orient.core.sql.operator.OQueryOperatorEquals;
/**
@@ -21,64 +24,69 @@
* index search and filed - value pair that uses this index should always be placed at last position.
*/
public class OIndexSearchResult {
- final Map<String, Object> fieldValuePairs = new HashMap<String, Object>();
- final OQueryOperator lastOperator;
- final OSQLFilterItemField.FieldChain lastField;
- final Object lastValue;
+ final Map<String, Object> fieldValuePairs = new HashMap<String, Object>(8);
+ final OQueryOperator lastOperator;
+ final OSQLFilterItemField.FieldChain lastField;
+ final Object lastValue;
- OIndexSearchResult(final OQueryOperator lastOperator, final OSQLFilterItemField.FieldChain field, final Object value) {
- this.lastOperator = lastOperator;
- lastField = field;
- lastValue = value;
- }
+ OIndexSearchResult(final OQueryOperator lastOperator, final OSQLFilterItemField.FieldChain field, final Object value) {
+ this.lastOperator = lastOperator;
+ lastField = field;
+ lastValue = value;
+ }
- /**
- * Combines two queries subset into one. This operation will be valid only if {@link #canBeMerged(OIndexSearchResult)} method will
- * return <code>true</code> for the same passed in parameter.
- *
- * @param searchResult
- * Query subset to merge.
- * @return New instance that presents merged query.
- */
- OIndexSearchResult merge(final OIndexSearchResult searchResult) {
- final OQueryOperator operator;
- final OIndexSearchResult result;
+ /**
+ * Combines two queries subset into one. This operation will be valid only if {@link #canBeMerged(OIndexSearchResult)} method will
+ * return <code>true</code> for the same passed in parameter.
+ *
+ * @param searchResult
+ * Query subset to merge.
+ * @return New instance that presents merged query.
+ */
+ OIndexSearchResult merge(final OIndexSearchResult searchResult) {
+ final OQueryOperator operator;
+ final OIndexSearchResult result;
- if (searchResult.lastOperator instanceof OQueryOperatorEquals) {
- result = new OIndexSearchResult(this.lastOperator, lastField, lastValue);
- result.fieldValuePairs.putAll(searchResult.fieldValuePairs);
- result.fieldValuePairs.putAll(fieldValuePairs);
- result.fieldValuePairs.put(searchResult.lastField.getItemName(0), searchResult.lastValue);
- } else {
- operator = searchResult.lastOperator;
- result = new OIndexSearchResult(operator, searchResult.lastField, searchResult.lastValue);
- result.fieldValuePairs.putAll(searchResult.fieldValuePairs);
- result.fieldValuePairs.putAll(fieldValuePairs);
- result.fieldValuePairs.put(lastField.getItemName(0), lastValue);
- }
- return result;
- }
+ if (searchResult.lastOperator instanceof OQueryOperatorEquals) {
+ result = new OIndexSearchResult(this.lastOperator, lastField, lastValue);
+ result.fieldValuePairs.putAll(searchResult.fieldValuePairs);
+ result.fieldValuePairs.putAll(fieldValuePairs);
+ result.fieldValuePairs.put(searchResult.lastField.getItemName(0), searchResult.lastValue);
+ } else {
+ operator = searchResult.lastOperator;
+ result = new OIndexSearchResult(operator, searchResult.lastField, searchResult.lastValue);
+ result.fieldValuePairs.putAll(searchResult.fieldValuePairs);
+ result.fieldValuePairs.putAll(fieldValuePairs);
+ result.fieldValuePairs.put(lastField.getItemName(0), lastValue);
+ }
+ return result;
+ }
- /**
- * @param searchResult
- * Query subset is going to be merged with given one.
- * @return <code>true</code> if two query subsets can be merged.
- */
- boolean canBeMerged(final OIndexSearchResult searchResult) {
- if (lastField.isLong() || searchResult.lastField.isLong()) {
- return false;
- }
- return (lastOperator instanceof OQueryOperatorEquals) || (searchResult.lastOperator instanceof OQueryOperatorEquals);
- }
+ /**
+ * @param searchResult
+ * Query subset is going to be merged with given one.
+ * @return <code>true</code> if two query subsets can be merged.
+ */
+ boolean canBeMerged(final OIndexSearchResult searchResult) {
+ if (lastField.isLong() || searchResult.lastField.isLong()) {
+ return false;
+ }
+ return isIndexEqualityOperator(lastOperator) || isIndexEqualityOperator(searchResult.lastOperator);
+ }
- List<String> fields() {
- final List<String> result = new ArrayList<String>(fieldValuePairs.size() + 1);
- result.addAll(fieldValuePairs.keySet());
- result.add(lastField.getItemName(0));
- return result;
- }
+ List<String> fields() {
+ final List<String> result = new ArrayList<String>(fieldValuePairs.size() + 1);
+ result.addAll(fieldValuePairs.keySet());
+ result.add(lastField.getItemName(0));
+ return result;
+ }
- int getFieldCount() {
- return fieldValuePairs.size() + 1;
- }
-}
\ No newline at end of file
+ int getFieldCount() {
+ return fieldValuePairs.size() + 1;
+ }
+
+ public static boolean isIndexEqualityOperator(OQueryOperator queryOperator) {
+ return queryOperator instanceof OQueryOperatorEquals || queryOperator instanceof OQueryOperatorContains
+ || queryOperator instanceof OQueryOperatorContainsKey || queryOperator instanceof OQueryOperatorContainsValue;
+ }
+}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorBetween.java b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorBetween.java
index 6d3a7c4089e..57e8dbcb8e3 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorBetween.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorBetween.java
@@ -26,6 +26,7 @@
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
+import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexInternal;
@@ -108,6 +109,8 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
else
result = index.getValuesBetween(keyOne, true, keyTwo, true);
} else {
+ final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;
+
final Object[] betweenKeys = (Object[]) keyParams.get(keyParams.size() - 1);
final Object betweenKeyOne = OSQLHelper.getValue(betweenKeys[0]);
@@ -128,12 +131,12 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
betweenKeyTwoParams.addAll(keyParams.subList(0, keyParams.size() - 1));
betweenKeyTwoParams.add(betweenKeyTwo);
- final Object keyOne = indexDefinition.createValue(betweenKeyOneParams);
+ final Object keyOne = compositeIndexDefinition.createSingleValue(betweenKeyOneParams);
if (keyOne == null)
return null;
- final Object keyTwo = indexDefinition.createValue(betweenKeyTwoParams);
+ final Object keyTwo = compositeIndexDefinition.createSingleValue(betweenKeyTwoParams);
if (keyTwo == null)
return null;
@@ -146,6 +149,8 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
if (OProfiler.getInstance().isRecording()) {
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage", 1);
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage." + indexDefinition.getParamCount(), 1);
+ OProfiler.getInstance().updateCounter(
+ "Query.compositeIndexUsage." + indexDefinition.getParamCount() + '.' + keyParams.size(), 1);
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContains.java b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContains.java
index 0d4faf90b3e..bdc7782e424 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContains.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContains.java
@@ -15,21 +15,23 @@
*/
package com.orientechnologies.orient.core.sql.operator;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import com.orientechnologies.common.profiler.OProfiler;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
+import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexDefinitionMultiValue;
import com.orientechnologies.orient.core.index.OIndexInternal;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterCondition;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
/**
* CONTAINS operator.
*
@@ -135,11 +137,37 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
if (indexResult instanceof Collection)
return (Collection<OIdentifiable>) indexResult;
- if(indexResult == null)
- return Collections.emptyList();
- return Collections.singletonList((OIdentifiable) indexResult);
+ if (indexResult == null)
+ return Collections.emptyList();
+ return Collections.singletonList((OIdentifiable) indexResult);
+ } else {
+ // in case of composite keys several items can be returned in case of we perform search
+ // using part of composite key stored in index.
+
+ final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;
+
+ final Object keyOne = compositeIndexDefinition.createSingleValue(keyParams);
+
+ if (keyOne == null)
+ return null;
+
+ final Object keyTwo = compositeIndexDefinition.createSingleValue(keyParams);
+
+ final Collection<OIdentifiable> result;
+ if (fetchLimit > -1)
+ result = index.getValuesBetween(keyOne, true, keyTwo, true, fetchLimit);
+ else
+ result = index.getValuesBetween(keyOne, true, keyTwo, true);
+
+ if (OProfiler.getInstance().isRecording()) {
+ OProfiler.getInstance().updateCounter("Query.compositeIndexUsage", 1);
+ OProfiler.getInstance().updateCounter("Query.compositeIndexUsage." + indexDefinition.getParamCount(), 1);
+ OProfiler.getInstance().updateCounter(
+ "Query.compositeIndexUsage." + indexDefinition.getParamCount() + '.' + keyParams.size(), 1);
+ }
+
+ return result;
}
- return null;
}
@Override
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContainsKey.java b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContainsKey.java
index ce7891be5a3..6f4f0b8f4f9 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContainsKey.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContainsKey.java
@@ -15,9 +15,16 @@
*/
package com.orientechnologies.orient.core.sql.operator;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import com.orientechnologies.common.profiler.OProfiler;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
+import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexDefinitionMultiValue;
@@ -25,11 +32,6 @@
import com.orientechnologies.orient.core.index.OPropertyMapIndexDefinition;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterCondition;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
/**
* CONTAINS KEY operator.
*
@@ -69,20 +71,16 @@ public OIndexReuseType getIndexReuseType(final Object iLeft, final Object iRight
public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object> keyParams, int fetchLimit) {
final OIndexDefinition indexDefinition = index.getDefinition();
- if (!((index.getDefinition() instanceof OPropertyMapIndexDefinition) && ((OPropertyMapIndexDefinition) index.getDefinition())
- .getIndexBy() == OPropertyMapIndexDefinition.INDEX_BY.KEY))
- return null;
-
final OIndexInternal<?> internalIndex = index.getInternal();
if (!internalIndex.canBeUsedInEqualityOperators())
return null;
if (indexDefinition.getParamCount() == 1) {
- final Object key;
- if (indexDefinition instanceof OIndexDefinitionMultiValue)
- key = ((OIndexDefinitionMultiValue) indexDefinition).createSingleValue(keyParams.get(0));
- else
- key = indexDefinition.createValue(keyParams);
+ if (!((indexDefinition instanceof OPropertyMapIndexDefinition) && ((OPropertyMapIndexDefinition) indexDefinition)
+ .getIndexBy() == OPropertyMapIndexDefinition.INDEX_BY.KEY))
+ return null;
+
+ final Object key = ((OIndexDefinitionMultiValue) indexDefinition).createSingleValue(keyParams.get(0));
if (key == null)
return null;
@@ -91,12 +89,42 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
if (indexResult instanceof Collection)
return (Collection<OIdentifiable>) indexResult;
- if(indexResult == null)
- return Collections.emptyList();
- return Collections.singletonList((OIdentifiable) indexResult);
- }
+ if (indexResult == null)
+ return Collections.emptyList();
+ return Collections.singletonList((OIdentifiable) indexResult);
+ } else {
+ // in case of composite keys several items can be returned in case of we perform search
+ // using part of composite key stored in index.
- return null;
+ final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;
+
+ if (!((compositeIndexDefinition.getMultiValueDefinition() instanceof OPropertyMapIndexDefinition) && ((OPropertyMapIndexDefinition) compositeIndexDefinition
+ .getMultiValueDefinition()).getIndexBy() == OPropertyMapIndexDefinition.INDEX_BY.KEY))
+ return null;
+
+ final Object keyOne = compositeIndexDefinition.createSingleValue(keyParams);
+
+ if (keyOne == null)
+ return null;
+
+ final Object keyTwo = compositeIndexDefinition.createSingleValue(keyParams);
+
+ final Collection<OIdentifiable> result;
+ if (fetchLimit > -1)
+ result = index.getValuesBetween(keyOne, true, keyTwo, true, fetchLimit);
+ else
+ result = index.getValuesBetween(keyOne, true, keyTwo, true);
+
+ if (OProfiler.getInstance().isRecording()) {
+ OProfiler.getInstance().updateCounter("Query.compositeIndexUsage", 1);
+ OProfiler.getInstance().updateCounter("Query.compositeIndexUsage." + indexDefinition.getParamCount(), 1);
+ OProfiler.getInstance().updateCounter(
+ "Query.compositeIndexUsage." + indexDefinition.getParamCount() + '.' + keyParams.size(), 1);
+ }
+
+ return result;
+
+ }
}
@Override
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContainsValue.java b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContainsValue.java
index 8295ed4123e..95b6d0829da 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContainsValue.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorContainsValue.java
@@ -15,12 +15,19 @@
*/
package com.orientechnologies.orient.core.sql.operator;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
import com.orientechnologies.common.exception.OException;
+import com.orientechnologies.common.profiler.OProfiler;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.db.record.ORecordElement;
import com.orientechnologies.orient.core.exception.ORecordNotFoundException;
import com.orientechnologies.orient.core.id.ORID;
+import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexDefinitionMultiValue;
@@ -30,11 +37,6 @@
import com.orientechnologies.orient.core.record.ORecordSchemaAware;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterCondition;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
/**
* CONTAINS KEY operator.
*
@@ -114,20 +116,16 @@ public OIndexReuseType getIndexReuseType(final Object iLeft, final Object iRight
public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object> keyParams, int fetchLimit) {
final OIndexDefinition indexDefinition = index.getDefinition();
- if (!((index.getDefinition() instanceof OPropertyMapIndexDefinition) && ((OPropertyMapIndexDefinition) index.getDefinition())
- .getIndexBy() == OPropertyMapIndexDefinition.INDEX_BY.VALUE))
- return null;
-
final OIndexInternal<?> internalIndex = index.getInternal();
if (!internalIndex.canBeUsedInEqualityOperators())
return null;
if (indexDefinition.getParamCount() == 1) {
- final Object key;
- if (indexDefinition instanceof OIndexDefinitionMultiValue)
- key = ((OIndexDefinitionMultiValue) indexDefinition).createSingleValue(keyParams.get(0));
- else
- key = indexDefinition.createValue(keyParams);
+ if (!((indexDefinition instanceof OPropertyMapIndexDefinition) && ((OPropertyMapIndexDefinition) indexDefinition)
+ .getIndexBy() == OPropertyMapIndexDefinition.INDEX_BY.VALUE))
+ return null;
+
+ final Object key = ((OIndexDefinitionMultiValue) indexDefinition).createSingleValue(keyParams.get(0));
if (key == null)
return null;
@@ -136,11 +134,40 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
if (indexResult instanceof Collection)
return (Collection<OIdentifiable>) indexResult;
- if(indexResult == null)
- return Collections.emptyList();
- return Collections.singletonList((OIdentifiable) indexResult);
- }
- return null;
+ if (indexResult == null)
+ return Collections.emptyList();
+ return Collections.singletonList((OIdentifiable) indexResult);
+ } else {
+ // in case of composite keys several items can be returned in case of we perform search
+ // using part of composite key stored in index.
+ final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;
+
+ if (!((compositeIndexDefinition.getMultiValueDefinition() instanceof OPropertyMapIndexDefinition) && ((OPropertyMapIndexDefinition) compositeIndexDefinition
+ .getMultiValueDefinition()).getIndexBy() == OPropertyMapIndexDefinition.INDEX_BY.VALUE))
+ return null;
+
+ final Object keyOne = compositeIndexDefinition.createSingleValue(keyParams);
+
+ if (keyOne == null)
+ return null;
+
+ final Object keyTwo = compositeIndexDefinition.createSingleValue(keyParams);
+
+ final Collection<OIdentifiable> result;
+ if (fetchLimit > -1)
+ result = index.getValuesBetween(keyOne, true, keyTwo, true, fetchLimit);
+ else
+ result = index.getValuesBetween(keyOne, true, keyTwo, true);
+
+ if (OProfiler.getInstance().isRecording()) {
+ OProfiler.getInstance().updateCounter("Query.compositeIndexUsage", 1);
+ OProfiler.getInstance().updateCounter("Query.compositeIndexUsage." + indexDefinition.getParamCount(), 1);
+ OProfiler.getInstance().updateCounter(
+ "Query.compositeIndexUsage." + indexDefinition.getParamCount() + '.' + keyParams.size(), 1);
+ }
+
+ return result;
+ }
}
@Override
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorEquals.java b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorEquals.java
index d8ae966b95f..b02cb692d81 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorEquals.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorEquals.java
@@ -15,10 +15,15 @@
*/
package com.orientechnologies.orient.core.sql.operator;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
import com.orientechnologies.common.profiler.OProfiler;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
+import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexDefinitionMultiValue;
@@ -31,10 +36,6 @@
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemField;
import com.orientechnologies.orient.core.sql.filter.OSQLFilterItemParameter;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-
/**
* EQUALS operator.
*
@@ -120,19 +121,21 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
if (indexResult instanceof Collection)
return (Collection<OIdentifiable>) indexResult;
- if(indexResult == null)
- return Collections.emptyList();
- return Collections.singletonList((OIdentifiable) indexResult);
- } else {
+ if (indexResult == null)
+ return Collections.emptyList();
+ return Collections.singletonList((OIdentifiable) indexResult);
+ } else {
// in case of composite keys several items can be returned in case of we perform search
// using part of composite key stored in index.
- final Object keyOne = indexDefinition.createValue(keyParams);
+ final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;
+
+ final Object keyOne = compositeIndexDefinition.createSingleValue(keyParams);
if (keyOne == null)
return null;
- final Object keyTwo = indexDefinition.createValue(keyParams);
+ final Object keyTwo = compositeIndexDefinition.createSingleValue(keyParams);
final Collection<OIdentifiable> result;
if (fetchLimit > -1)
@@ -143,6 +146,8 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
if (OProfiler.getInstance().isRecording()) {
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage", 1);
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage." + indexDefinition.getParamCount(), 1);
+ OProfiler.getInstance().updateCounter(
+ "Query.compositeIndexUsage." + indexDefinition.getParamCount() + '.' + keyParams.size(), 1);
}
return result;
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMajor.java b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMajor.java
index 0f258d3d683..0dcbbcccca6 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMajor.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMajor.java
@@ -23,6 +23,7 @@
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
+import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexDefinitionMultiValue;
@@ -91,12 +92,14 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
// index that contains keys with values field1=1 and field2=2 and which right included boundary
// is the biggest composite key in the index that contains key with value field1=1.
- final Object keyOne = indexDefinition.createValue(keyParams);
+ final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;
+
+ final Object keyOne = compositeIndexDefinition.createSingleValue(keyParams);
if (keyOne == null)
return null;
- final Object keyTwo = indexDefinition.createValue(keyParams.subList(0, keyParams.size() - 1));
+ final Object keyTwo = compositeIndexDefinition.createSingleValue(keyParams.subList(0, keyParams.size() - 1));
if (keyTwo == null)
return null;
@@ -109,6 +112,8 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
if (OProfiler.getInstance().isRecording()) {
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage", 1);
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage." + indexDefinition.getParamCount(), 1);
+ OProfiler.getInstance().updateCounter(
+ "Query.compositeIndexUsage." + indexDefinition.getParamCount() + '.' + keyParams.size(), 1);
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMajorEquals.java b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMajorEquals.java
index 151e05c3b6b..86aae9a5b83 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMajorEquals.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMajorEquals.java
@@ -22,6 +22,7 @@
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
+import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexDefinitionMultiValue;
@@ -90,12 +91,14 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
// index that contains keys with values field1=1 and field2=2 and which right included boundary
// is the biggest composite key in the index that contains key with value field1=1.
- final Object keyOne = indexDefinition.createValue(keyParams);
+ final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;
+
+ final Object keyOne = compositeIndexDefinition.createSingleValue(keyParams);
if (keyOne == null)
return null;
- final Object keyTwo = indexDefinition.createValue(keyParams.subList(0, keyParams.size() - 1));
+ final Object keyTwo = compositeIndexDefinition.createSingleValue(keyParams.subList(0, keyParams.size() - 1));
if (keyTwo == null)
return null;
@@ -108,6 +111,8 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
if (OProfiler.getInstance().isRecording()) {
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage", 1);
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage." + indexDefinition.getParamCount(), 1);
+ OProfiler.getInstance().updateCounter(
+ "Query.compositeIndexUsage." + indexDefinition.getParamCount() + '.' + keyParams.size(), 1);
}
}
return result;
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMinor.java b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMinor.java
index 747a27f6b8c..f67eae28964 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMinor.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMinor.java
@@ -22,6 +22,7 @@
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
+import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexDefinitionMultiValue;
@@ -90,12 +91,14 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
// index that contains key with value field1=1 and which right not included boundary
// is the biggest composite key in the index that contains key with values field1=1 and field2=2.
- final Object keyOne = indexDefinition.createValue(keyParams.subList(0, keyParams.size() - 1));
+ final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;
+
+ final Object keyOne = compositeIndexDefinition.createSingleValue(keyParams.subList(0, keyParams.size() - 1));
if (keyOne == null)
return null;
- final Object keyTwo = indexDefinition.createValue(keyParams);
+ final Object keyTwo = compositeIndexDefinition.createSingleValue(keyParams);
if (keyTwo == null)
return null;
@@ -108,6 +111,8 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
if (OProfiler.getInstance().isRecording()) {
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage", 1);
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage." + indexDefinition.getParamCount(), 1);
+ OProfiler.getInstance().updateCounter(
+ "Query.compositeIndexUsage." + indexDefinition.getParamCount() + '.' + keyParams.size(), 1);
}
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMinorEquals.java b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMinorEquals.java
index d9bed3fc5f4..80cb554f2c0 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMinorEquals.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/operator/OQueryOperatorMinorEquals.java
@@ -22,6 +22,7 @@
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
+import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexDefinitionMultiValue;
@@ -90,12 +91,14 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
// index that contains key with value field1=1 and which right not included boundary
// is the biggest composite key in the index that contains key with value field1=1 and field2=2.
- final Object keyOne = indexDefinition.createValue(keyParams.subList(0, keyParams.size() - 1));
+ final OCompositeIndexDefinition compositeIndexDefinition = (OCompositeIndexDefinition) indexDefinition;
+
+ final Object keyOne = compositeIndexDefinition.createSingleValue(keyParams.subList(0, keyParams.size() - 1));
if (keyOne == null)
return null;
- final Object keyTwo = indexDefinition.createValue(keyParams);
+ final Object keyTwo = compositeIndexDefinition.createSingleValue(keyParams);
if (keyTwo == null)
return null;
@@ -108,6 +111,8 @@ public Collection<OIdentifiable> executeIndexQuery(OIndex<?> index, List<Object>
if (OProfiler.getInstance().isRecording()) {
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage", 1);
OProfiler.getInstance().updateCounter("Query.compositeIndexUsage." + indexDefinition.getParamCount(), 1);
+ OProfiler.getInstance().updateCounter(
+ "Query.compositeIndexUsage." + indexDefinition.getParamCount() + '.' + keyParams.size(), 1);
}
}
return result;
diff --git a/core/src/test/java/com/orientechnologies/orient/core/index/OCompositeIndexDefinitionTest.java b/core/src/test/java/com/orientechnologies/orient/core/index/OCompositeIndexDefinitionTest.java
index dda2cae1050..c9752799bba 100644
--- a/core/src/test/java/com/orientechnologies/orient/core/index/OCompositeIndexDefinitionTest.java
+++ b/core/src/test/java/com/orientechnologies/orient/core/index/OCompositeIndexDefinitionTest.java
@@ -1,14 +1,24 @@
package com.orientechnologies.orient.core.index;
+import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.orientechnologies.common.collection.OCompositeKey;
+import com.orientechnologies.common.exception.OException;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
+import com.orientechnologies.orient.core.db.record.OMultiValueChangeEvent;
+import com.orientechnologies.orient.core.db.record.OMultiValueChangeListener;
+import com.orientechnologies.orient.core.db.record.OTrackedList;
+import com.orientechnologies.orient.core.db.record.OTrackedMap;
+import com.orientechnologies.orient.core.db.record.OTrackedSet;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
@@ -36,14 +46,97 @@ public void testGetFields() {
@Test
public void testCreateValueSuccessful() {
- final Comparable<?> result = compositeIndex.createValue(Arrays.asList("12", "test"));
+ final Object result = compositeIndex.createValue(Arrays.asList("12", "test"));
Assert.assertEquals(result, new OCompositeKey(Arrays.asList(12, "test")));
}
+ @Test
+ public void testCreateMapValueSuccessful() {
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyMapIndexDefinition("testCollectionClass", "fTwo", OType.STRING,
+ OPropertyMapIndexDefinition.INDEX_BY.KEY));
+
+ final Map<String, String> stringMap = new HashMap<String, String>();
+ stringMap.put("key1", "val1");
+ stringMap.put("key2", "val2");
+
+ final Object result = compositeIndexDefinition.createValue(12, stringMap);
+
+ final Collection<OCompositeKey> collectionResult = (Collection<OCompositeKey>) result;
+
+ Assert.assertEquals(collectionResult.size(), 2);
+ Assert.assertTrue(collectionResult.contains(new OCompositeKey(12, "key1")));
+ Assert.assertTrue(collectionResult.contains(new OCompositeKey(12, "key2")));
+ }
+
+ @Test
+ public void testCreateCollectionValueSuccessfulOne() {
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
+
+ final Object result = compositeIndexDefinition.createValue(12, Arrays.asList(1, 2));
+
+ final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
+
+ expectedResult.add(new OCompositeKey(12, 1));
+ expectedResult.add(new OCompositeKey(12, 2));
+
+ Assert.assertEquals(result, expectedResult);
+ }
+
+ @Test
+ public void testCreateCollectionValueSuccessfulTwo() {
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
+
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+
+ final Object result = compositeIndexDefinition.createValue(Arrays.asList(Arrays.asList(1, 2), 12));
+
+ final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
+
+ expectedResult.add(new OCompositeKey(1, 12));
+ expectedResult.add(new OCompositeKey(2, 12));
+
+ Assert.assertEquals(result, expectedResult);
+ }
+
+ @Test
+ public void testCreateCollectionValueSuccessfulThree() {
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.STRING));
+
+ final Object result = compositeIndexDefinition.createValue(12, Arrays.asList(1, 2), "test");
+
+ final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
+
+ expectedResult.add(new OCompositeKey(12, 1, "test"));
+ expectedResult.add(new OCompositeKey(12, 2, "test"));
+
+ Assert.assertEquals(result, expectedResult);
+ }
+
+ @Test(expectedExceptions = OIndexException.class)
+ public void testCreateCollectionValueTwoCollections() {
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
+
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+
+ compositeIndexDefinition.createValue(Arrays.asList(1, 2), Arrays.asList(12));
+ }
+
@Test
public void testCreateValueWrongParam() {
- final Comparable<?> result = compositeIndex.createValue(Arrays.asList("1t2", "test"));
+ final Object result = compositeIndex.createValue(Arrays.asList("1t2", "test"));
Assert.assertNull(result);
}
@@ -92,6 +185,113 @@ public void testDocumentToIndexSuccessful() {
Assert.assertEquals(result, new OCompositeKey(Arrays.asList(12, "test")));
}
+ @Test
+ public void testDocumentToIndexMapValueSuccessful() {
+ final ODocument document = new ODocument();
+
+ final Map<String, String> stringMap = new HashMap<String, String>();
+ stringMap.put("key1", "val1");
+ stringMap.put("key2", "val2");
+
+ document.field("fOne", 12);
+ document.field("fTwo", stringMap);
+
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyMapIndexDefinition("testCollectionClass", "fTwo", OType.STRING,
+ OPropertyMapIndexDefinition.INDEX_BY.KEY));
+
+ final Object result = compositeIndexDefinition.getDocumentValueToIndex(document);
+ final Collection<OCompositeKey> collectionResult = (Collection<OCompositeKey>) result;
+
+ Assert.assertEquals(collectionResult.size(), 2);
+ Assert.assertTrue(collectionResult.contains(new OCompositeKey(12, "key1")));
+ Assert.assertTrue(collectionResult.contains(new OCompositeKey(12, "key2")));
+ }
+
+ @Test
+ public void testDocumentToIndexCollectionValueSuccessfulOne() {
+ final ODocument document = new ODocument();
+
+ document.field("fOne", 12);
+ document.field("fTwo", Arrays.asList(1, 2));
+
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
+
+ final Object result = compositeIndexDefinition.getDocumentValueToIndex(document);
+
+ final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
+
+ expectedResult.add(new OCompositeKey(12, 1));
+ expectedResult.add(new OCompositeKey(12, 2));
+
+ Assert.assertEquals(result, expectedResult);
+ }
+
+ @Test
+ public void testDocumentToIndexCollectionValueSuccessfulTwo() {
+ final ODocument document = new ODocument();
+
+ document.field("fOne", 12);
+ document.field("fTwo", Arrays.asList(1, 2));
+
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
+
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+
+ final Object result = compositeIndexDefinition.getDocumentValueToIndex(document);
+
+ final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
+
+ expectedResult.add(new OCompositeKey(1, 12));
+ expectedResult.add(new OCompositeKey(2, 12));
+
+ Assert.assertEquals(result, expectedResult);
+ }
+
+ @Test
+ public void testDocumentToIndexCollectionValueSuccessfulThree() {
+ final ODocument document = new ODocument();
+
+ document.field("fOne", 12);
+ document.field("fTwo", Arrays.asList(1, 2));
+ document.field("fThree", "test");
+
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.STRING));
+
+ final Object result = compositeIndexDefinition.getDocumentValueToIndex(document);
+
+ final ArrayList<OCompositeKey> expectedResult = new ArrayList<OCompositeKey>();
+
+ expectedResult.add(new OCompositeKey(12, 1, "test"));
+ expectedResult.add(new OCompositeKey(12, 2, "test"));
+
+ Assert.assertEquals(result, expectedResult);
+ }
+
+ @Test(expectedExceptions = OException.class)
+ public void testDocumentToIndexCollectionValueTwoCollections() {
+ final ODocument document = new ODocument();
+
+ document.field("fOne", Arrays.asList(12));
+ document.field("fTwo", Arrays.asList(1, 2));
+
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition("testCollectionClass");
+
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.INTEGER));
+ compositeIndexDefinition.getDocumentValueToIndex(document);
+ }
+
@Test
public void testDocumentToIndexWrongField() {
final ODocument document = new ODocument();
@@ -179,6 +379,246 @@ public void testClassOnlyConstructor() {
Assert.assertEquals(result, emptyCompositeIndexTwo);
}
+ public void testProcessChangeListEventsOne() {
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.STRING));
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
+
+ final ODocument doc = new ODocument();
+ doc.unsetDirty();
+ Assert.assertFalse(doc.isDirty());
+
+ final OTrackedList<String> trackedList = new OTrackedList<String>(doc);
+ final List<OMultiValueChangeEvent<Integer, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<Integer, String>>();
+
+ trackedList.addChangeListener(new OMultiValueChangeListener<Integer, String>() {
+ public void onAfterRecordChanged(final OMultiValueChangeEvent<Integer, String> event) {
+ firedEvents.add(event);
+ }
+ });
+
+ trackedList.add("l1");
+ trackedList.add("l2");
+ trackedList.add("l3");
+ trackedList.remove("l2");
+
+ Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
+ Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
+
+ for (OMultiValueChangeEvent<Integer, String> multiValueChangeEvent : firedEvents)
+ compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
+
+ Assert.assertEquals(keysToRemove.size(), 0);
+ Assert.assertEquals(keysToAdd.size(), 2);
+
+ Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l1", 3)));
+ Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l3", 3)));
+ }
+
+ public void testProcessChangeListEventsTwo() {
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.STRING));
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
+
+ final ODocument doc = new ODocument();
+ doc.unsetDirty();
+ Assert.assertFalse(doc.isDirty());
+
+ final OTrackedList<String> trackedList = new OTrackedList<String>(doc);
+ final List<OMultiValueChangeEvent<Integer, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<Integer, String>>();
+
+ trackedList.add("l1");
+ trackedList.add("l2");
+ trackedList.add("l3");
+ trackedList.remove("l2");
+
+ trackedList.addChangeListener(new OMultiValueChangeListener<Integer, String>() {
+ public void onAfterRecordChanged(final OMultiValueChangeEvent<Integer, String> event) {
+ firedEvents.add(event);
+ }
+ });
+
+ trackedList.add("l4");
+ trackedList.remove("l1");
+
+ Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
+ Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
+
+ for (OMultiValueChangeEvent<Integer, String> multiValueChangeEvent : firedEvents)
+ compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
+
+ Assert.assertEquals(keysToRemove.size(), 1);
+ Assert.assertEquals(keysToAdd.size(), 1);
+
+ Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l4", 3)));
+ Assert.assertTrue(keysToRemove.containsKey(new OCompositeKey(2, "l1", 3)));
+ }
+
+ public void testProcessChangeSetEventsOne() {
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.STRING));
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
+
+ final ODocument doc = new ODocument();
+ doc.unsetDirty();
+ Assert.assertFalse(doc.isDirty());
+
+ final OTrackedSet<String> trackedSet = new OTrackedSet<String>(doc);
+ final List<OMultiValueChangeEvent<String, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<String, String>>();
+
+ trackedSet.addChangeListener(new OMultiValueChangeListener<String, String>() {
+ public void onAfterRecordChanged(final OMultiValueChangeEvent<String, String> event) {
+ firedEvents.add(event);
+ }
+ });
+
+ trackedSet.add("l1");
+ trackedSet.add("l2");
+ trackedSet.add("l3");
+ trackedSet.remove("l2");
+
+ Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
+ Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
+
+ for (OMultiValueChangeEvent<String, String> multiValueChangeEvent : firedEvents)
+ compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
+
+ Assert.assertEquals(keysToRemove.size(), 0);
+ Assert.assertEquals(keysToAdd.size(), 2);
+
+ Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l1", 3)));
+ Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l3", 3)));
+ }
+
+ public void testProcessChangeSetEventsTwo() {
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyListIndexDefinition("testCollectionClass", "fTwo", OType.STRING));
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
+
+ final ODocument doc = new ODocument();
+ doc.unsetDirty();
+ Assert.assertFalse(doc.isDirty());
+
+ final OTrackedSet<String> trackedMap = new OTrackedSet<String>(doc);
+ final List<OMultiValueChangeEvent<String, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<String, String>>();
+
+ trackedMap.add("l1");
+ trackedMap.add("l2");
+ trackedMap.add("l3");
+ trackedMap.remove("l2");
+
+ trackedMap.addChangeListener(new OMultiValueChangeListener<String, String>() {
+ public void onAfterRecordChanged(final OMultiValueChangeEvent<String, String> event) {
+ firedEvents.add(event);
+ }
+ });
+
+ trackedMap.add("l4");
+ trackedMap.remove("l1");
+
+ Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
+ Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
+
+ for (OMultiValueChangeEvent<String, String> multiValueChangeEvent : firedEvents)
+ compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
+
+ Assert.assertEquals(keysToRemove.size(), 1);
+ Assert.assertEquals(keysToAdd.size(), 1);
+
+ Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "l4", 3)));
+ Assert.assertTrue(keysToRemove.containsKey(new OCompositeKey(2, "l1", 3)));
+ }
+
+ public void testProcessChangeKeyMapEventsOne() {
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyMapIndexDefinition("testCollectionClass", "fTwo", OType.STRING,
+ OPropertyMapIndexDefinition.INDEX_BY.KEY));
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
+
+ final ODocument doc = new ODocument();
+ doc.unsetDirty();
+ Assert.assertFalse(doc.isDirty());
+
+ final OTrackedMap<String> trackedMap = new OTrackedMap<String>(doc);
+ final List<OMultiValueChangeEvent<Object, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<Object, String>>();
+
+ trackedMap.addChangeListener(new OMultiValueChangeListener<Object, String>() {
+ public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) {
+ firedEvents.add(event);
+ }
+ });
+
+ trackedMap.put("k1", "v1");
+ trackedMap.put("k2", "v2");
+ trackedMap.put("k3", "v3");
+ trackedMap.remove("k2");
+
+ Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
+ Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
+
+ for (OMultiValueChangeEvent<Object, String> multiValueChangeEvent : firedEvents)
+ compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
+
+ Assert.assertEquals(keysToRemove.size(), 0);
+ Assert.assertEquals(keysToAdd.size(), 2);
+
+ Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "k1", 3)));
+ Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "k3", 3)));
+ }
+
+ public void testProcessChangeKeyMapEventsTwo() {
+ final OCompositeIndexDefinition compositeIndexDefinition = new OCompositeIndexDefinition();
+
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fOne", OType.INTEGER));
+ compositeIndexDefinition.addIndex(new OPropertyMapIndexDefinition("testCollectionClass", "fTwo", OType.STRING,
+ OPropertyMapIndexDefinition.INDEX_BY.KEY));
+ compositeIndexDefinition.addIndex(new OPropertyIndexDefinition("testCollectionClass", "fThree", OType.INTEGER));
+
+ final ODocument doc = new ODocument();
+ doc.unsetDirty();
+ Assert.assertFalse(doc.isDirty());
+
+ final OTrackedMap<String> trackedMap = new OTrackedMap<String>(doc);
+
+ trackedMap.put("k1", "v1");
+ trackedMap.put("k2", "v2");
+ trackedMap.put("k3", "v3");
+ trackedMap.remove("k2");
+
+ final List<OMultiValueChangeEvent<Object, String>> firedEvents = new ArrayList<OMultiValueChangeEvent<Object, String>>();
+
+ trackedMap.addChangeListener(new OMultiValueChangeListener<Object, String>() {
+ public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) {
+ firedEvents.add(event);
+ }
+ });
+
+ trackedMap.put("k4", "v4");
+ trackedMap.remove("k1");
+
+ Map<OCompositeKey, Integer> keysToAdd = new HashMap<OCompositeKey, Integer>();
+ Map<OCompositeKey, Integer> keysToRemove = new HashMap<OCompositeKey, Integer>();
+
+ for (OMultiValueChangeEvent<Object, String> multiValueChangeEvent : firedEvents)
+ compositeIndexDefinition.processChangeEvent(multiValueChangeEvent, keysToAdd, keysToRemove, 2, 3);
+
+ Assert.assertEquals(keysToRemove.size(), 1);
+ Assert.assertEquals(keysToAdd.size(), 1);
+
+ Assert.assertTrue(keysToAdd.containsKey(new OCompositeKey(2, "k4", 3)));
+ Assert.assertTrue(keysToRemove.containsKey(new OCompositeKey(2, "k1", 3)));
+ }
+
@Test
public void testClassName() {
Assert.assertEquals("testClass", compositeIndex.getClassName());
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/ClassIndexManagerTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/ClassIndexManagerTest.java
index f6a09dfbf0d..4cf2f5fa933 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/ClassIndexManagerTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/ClassIndexManagerTest.java
@@ -1,6 +1,7 @@
package com.orientechnologies.orient.test.database.auto;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
@@ -16,6 +17,7 @@
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
+import com.orientechnologies.common.collection.OCompositeKey;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexDefinition;
@@ -29,839 +31,1275 @@
@Test(groups = { "index" })
public class ClassIndexManagerTest {
- private final ODatabaseDocumentTx database;
-
- @Parameters(value = "url")
- public ClassIndexManagerTest(final String iURL) {
- database = new ODatabaseDocumentTx(iURL);
- }
-
- @BeforeClass
- public void beforeClass() {
- if (database.isClosed())
- database.open("admin", "admin");
-
- final OSchema schema = database.getMetadata().getSchema();
- final OClass superClass = schema.createClass("classIndexManagerTestSuperClass");
- final OProperty propertyZero = superClass.createProperty("prop0", OType.STRING);
- propertyZero.createIndex(OClass.INDEX_TYPE.UNIQUE);
-
- final OClass oClass = schema.createClass("classIndexManagerTestClass", superClass);
- final OProperty propOne = oClass.createProperty("prop1", OType.STRING);
- propOne.createIndex(OClass.INDEX_TYPE.UNIQUE);
-
- final OProperty propTwo = oClass.createProperty("prop2", OType.INTEGER);
- propTwo.createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
-
- oClass.createProperty("prop3", OType.BOOLEAN);
-
- final OProperty propFour = oClass.createProperty("prop4", OType.EMBEDDEDLIST, OType.STRING);
- propFour.createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
-
- oClass.createProperty("prop5", OType.EMBEDDEDMAP, OType.STRING);
- oClass.createIndex("classIndexManagerTestIndexByKey", OClass.INDEX_TYPE.NOTUNIQUE, "prop5");
- oClass.createIndex("classIndexManagerTestIndexByValue", OClass.INDEX_TYPE.NOTUNIQUE, "prop5 by value");
-
- final OProperty propSix = oClass.createProperty("prop6", OType.EMBEDDEDSET, OType.STRING);
- propSix.createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
-
- oClass.createIndex("classIndexManagerComposite", OClass.INDEX_TYPE.UNIQUE, "prop1", "prop2");
-
- final OClass oClassTwo = schema.createClass("classIndexManagerTestClassTwo");
- oClassTwo.createProperty("prop1", OType.STRING);
- oClassTwo.createProperty("prop2", OType.INTEGER);
-
- schema.save();
-
- database.close();
- }
-
- @BeforeMethod
- public void beforeMethod() {
- if (database.isClosed())
- database.open("admin", "admin");
- }
-
- @AfterMethod
- public void afterMethod() {
- database.command(new OCommandSQL("delete from classIndexManagerTestClass")).execute();
- database.command(new OCommandSQL("delete from classIndexManagerTestClassTwo")).execute();
- database.command(new OCommandSQL("delete from classIndexManagerTestSuperClass")).execute();
- database.close();
- }
-
- @AfterClass
- public void afterClass() {
- if (database.isClosed())
- database.open("admin", "admin");
- database.command(new OCommandSQL("drop class classIndexManagerTestClass")).execute();
- database.command(new OCommandSQL("drop class classIndexManagerTestClassTwo")).execute();
- database.command(new OCommandSQL("drop class classIndexManagerTestSuperClass")).execute();
- database.getMetadata().getSchema().reload();
- database.getLevel2Cache().clear();
- database.close();
- }
-
- public void testPropertiesCheckUniqueIndexDubKeysCreate() {
- final ODocument docOne = new ODocument("classIndexManagerTestClass");
- final ODocument docTwo = new ODocument("classIndexManagerTestClass");
-
- docOne.field("prop1", "a");
- docOne.save();
-
- boolean exceptionThrown = false;
- try {
- docTwo.field("prop1", "a");
- docTwo.save();
- } catch (OIndexException e) {
- exceptionThrown = true;
- }
- Assert.assertTrue(exceptionThrown);
- }
-
- public void testPropertiesCheckUniqueIndexDubKeyIsNullCreate() {
- final ODocument docOne = new ODocument("classIndexManagerTestClass");
- final ODocument docTwo = new ODocument("classIndexManagerTestClass");
-
- docOne.field("prop1", "a");
- docOne.save();
-
- docTwo.field("prop1", (String)null);
- docTwo.save();
- }
-
- public void testPropertiesCheckUniqueIndexDubKeyIsNullCreateInTx() {
- final ODocument docOne = new ODocument("classIndexManagerTestClass");
- final ODocument docTwo = new ODocument("classIndexManagerTestClass");
-
- database.begin();
- docOne.field("prop1", "a");
- docOne.save();
-
- docTwo.field("prop1", (String)null);
- docTwo.save();
- database.commit();
- }
-
- public void testPropertiesCheckUniqueIndexInParentDubKeysCreate() {
- final ODocument docOne = new ODocument("classIndexManagerTestClass");
- final ODocument docTwo = new ODocument("classIndexManagerTestClass");
-
- docOne.field("prop0", "a");
- docOne.save();
-
- boolean exceptionThrown = false;
- try {
- docTwo.field("prop0", "a");
- docTwo.save();
- } catch (OIndexException e) {
- exceptionThrown = true;
- }
- Assert.assertTrue(exceptionThrown);
- }
-
- public void testPropertiesCheckUniqueIndexDubKeysUpdate() {
- final ODocument docOne = new ODocument("classIndexManagerTestClass");
- final ODocument docTwo = new ODocument("classIndexManagerTestClass");
-
- boolean exceptionThrown = false;
- docOne.field("prop1", "a");
- docOne.save();
-
- docTwo.field("prop1", "b");
- docTwo.save();
-
- try {
- docTwo.field("prop1", "a");
- docTwo.save();
- } catch (OIndexException e) {
- exceptionThrown = true;
- }
- Assert.assertTrue(exceptionThrown);
- }
-
- public void testPropertiesCheckUniqueIndexDubKeyIsNullUpdate() {
- final ODocument docOne = new ODocument("classIndexManagerTestClass");
- final ODocument docTwo = new ODocument("classIndexManagerTestClass");
-
- docOne.field("prop1", "a");
- docOne.save();
-
- docTwo.field("prop1", "b");
- docTwo.save();
-
- docTwo.field("prop1", (String)null);
- docTwo.save();
- }
-
- public void testPropertiesCheckUniqueIndexDubKeyIsNullUpdateInTX() {
- final ODocument docOne = new ODocument("classIndexManagerTestClass");
- final ODocument docTwo = new ODocument("classIndexManagerTestClass");
-
- database.begin();
- docOne.field("prop1", "a");
- docOne.save();
-
- docTwo.field("prop1", "b");
- docTwo.save();
-
- docTwo.field("prop1", (String)null);
- docTwo.save();
- database.commit();
- }
-
- public void testPropertiesCheckNonUniqueIndexDubKeys() {
- final ODocument docOne = new ODocument("classIndexManagerTestClass");
- docOne.field("prop2", 1);
- docOne.save();
-
- final ODocument docTwo = new ODocument("classIndexManagerTestClass");
- docTwo.field("prop2", 1);
- docTwo.save();
- }
-
- public void testPropertiesCheckUniqueNullKeys() {
- final ODocument docOne = new ODocument("classIndexManagerTestClass");
- docOne.field("prop3", "a");
- docOne.save();
-
- final ODocument docTwo = new ODocument("classIndexManagerTestClass");
- docTwo.field("prop3", "a");
- docTwo.save();
- }
-
- public void testCreateDocumentWithoutClass() {
- final Collection<? extends OIndex<?>> beforeIndexes = database.getMetadata().getIndexManager().getIndexes();
- final Map<String, Long> indexSizeMap = new HashMap<String, Long>();
-
- for (final OIndex<?> index : beforeIndexes)
- indexSizeMap.put(index.getName(), index.getSize());
-
- final ODocument docOne = new ODocument();
- docOne.field("prop1", "a");
- docOne.save();
-
- final ODocument docTwo = new ODocument();
- docTwo.field("prop1", "a");
- docTwo.save();
-
- final Collection<? extends OIndex<?>> afterIndexes = database.getMetadata().getIndexManager().getIndexes();
- for (final OIndex<?> index : afterIndexes)
- Assert.assertEquals(index.getSize(), indexSizeMap.get(index.getName()).longValue());
- }
-
- public void testUpdateDocumentWithoutClass() {
- final Collection<? extends OIndex<?>> beforeIndexes = database.getMetadata().getIndexManager().getIndexes();
- final Map<String, Long> indexSizeMap = new HashMap<String, Long>();
-
- for (final OIndex<?> index : beforeIndexes)
- indexSizeMap.put(index.getName(), index.getSize());
-
- final ODocument docOne = new ODocument();
- docOne.field("prop1", "a");
- docOne.save();
-
- final ODocument docTwo = new ODocument();
- docTwo.field("prop1", "b");
- docTwo.save();
+ private final ODatabaseDocumentTx database;
+
+ @Parameters(value = "url")
+ public ClassIndexManagerTest(final String iURL) {
+ database = new ODatabaseDocumentTx(iURL);
+ }
+
+ @BeforeClass
+ public void beforeClass() {
+ if (database.isClosed())
+ database.open("admin", "admin");
+
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass superClass = schema.createClass("classIndexManagerTestSuperClass");
+ final OProperty propertyZero = superClass.createProperty("prop0", OType.STRING);
+ propertyZero.createIndex(OClass.INDEX_TYPE.UNIQUE);
+
+ final OClass oClass = schema.createClass("classIndexManagerTestClass", superClass);
+ final OProperty propOne = oClass.createProperty("prop1", OType.STRING);
+ propOne.createIndex(OClass.INDEX_TYPE.UNIQUE);
+
+ final OProperty propTwo = oClass.createProperty("prop2", OType.INTEGER);
+ propTwo.createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
+
+ oClass.createProperty("prop3", OType.BOOLEAN);
+
+ final OProperty propFour = oClass.createProperty("prop4", OType.EMBEDDEDLIST, OType.STRING);
+ propFour.createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
+
+ oClass.createProperty("prop5", OType.EMBEDDEDMAP, OType.STRING);
+ oClass.createIndex("classIndexManagerTestIndexByKey", OClass.INDEX_TYPE.NOTUNIQUE, "prop5");
+ oClass.createIndex("classIndexManagerTestIndexByValue", OClass.INDEX_TYPE.NOTUNIQUE, "prop5 by value");
+
+ final OProperty propSix = oClass.createProperty("prop6", OType.EMBEDDEDSET, OType.STRING);
+ propSix.createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
+
+ oClass.createIndex("classIndexManagerComposite", OClass.INDEX_TYPE.UNIQUE, "prop1", "prop2");
+
+ final OClass oClassTwo = schema.createClass("classIndexManagerTestClassTwo");
+ oClassTwo.createProperty("prop1", OType.STRING);
+ oClassTwo.createProperty("prop2", OType.INTEGER);
+
+ final OClass compositeCollectionClass = schema.createClass("classIndexManagerTestCompositeCollectionClass");
+ compositeCollectionClass.createProperty("prop1", OType.STRING);
+ compositeCollectionClass.createProperty("prop2", OType.EMBEDDEDLIST, OType.INTEGER);
+
+ compositeCollectionClass
+ .createIndex("classIndexManagerTestIndexValueAndCollection", OClass.INDEX_TYPE.UNIQUE, "prop1", "prop2");
+
+ schema.save();
+
+ database.close();
+ }
+
+ @BeforeMethod
+ public void beforeMethod() {
+ if (database.isClosed())
+ database.open("admin", "admin");
+ }
+
+ @AfterMethod
+ public void afterMethod() {
+ database.command(new OCommandSQL("delete from classIndexManagerTestClass")).execute();
+ database.command(new OCommandSQL("delete from classIndexManagerTestClassTwo")).execute();
+ database.command(new OCommandSQL("delete from classIndexManagerTestSuperClass")).execute();
+ database.close();
+ }
+
+ @AfterClass
+ public void afterClass() {
+ if (database.isClosed())
+ database.open("admin", "admin");
+ database.command(new OCommandSQL("drop class classIndexManagerTestClass")).execute();
+ database.command(new OCommandSQL("drop class classIndexManagerTestClassTwo")).execute();
+ database.command(new OCommandSQL("drop class classIndexManagerTestSuperClass")).execute();
+ database.getMetadata().getSchema().reload();
+ database.getLevel2Cache().clear();
+ database.close();
+ }
+
+ public void testPropertiesCheckUniqueIndexDubKeysCreate() {
+ final ODocument docOne = new ODocument("classIndexManagerTestClass");
+ final ODocument docTwo = new ODocument("classIndexManagerTestClass");
+
+ docOne.field("prop1", "a");
+ docOne.save();
+
+ boolean exceptionThrown = false;
+ try {
+ docTwo.field("prop1", "a");
+ docTwo.save();
+ } catch (OIndexException e) {
+ exceptionThrown = true;
+ }
+ Assert.assertTrue(exceptionThrown);
+ }
+
+ public void testPropertiesCheckUniqueIndexDubKeyIsNullCreate() {
+ final ODocument docOne = new ODocument("classIndexManagerTestClass");
+ final ODocument docTwo = new ODocument("classIndexManagerTestClass");
+
+ docOne.field("prop1", "a");
+ docOne.save();
+
+ docTwo.field("prop1", (String) null);
+ docTwo.save();
+ }
+
+ public void testPropertiesCheckUniqueIndexDubKeyIsNullCreateInTx() {
+ final ODocument docOne = new ODocument("classIndexManagerTestClass");
+ final ODocument docTwo = new ODocument("classIndexManagerTestClass");
+
+ database.begin();
+ docOne.field("prop1", "a");
+ docOne.save();
+
+ docTwo.field("prop1", (String) null);
+ docTwo.save();
+ database.commit();
+ }
+
+ public void testPropertiesCheckUniqueIndexInParentDubKeysCreate() {
+ final ODocument docOne = new ODocument("classIndexManagerTestClass");
+ final ODocument docTwo = new ODocument("classIndexManagerTestClass");
+
+ docOne.field("prop0", "a");
+ docOne.save();
+
+ boolean exceptionThrown = false;
+ try {
+ docTwo.field("prop0", "a");
+ docTwo.save();
+ } catch (OIndexException e) {
+ exceptionThrown = true;
+ }
+ Assert.assertTrue(exceptionThrown);
+ }
+
+ public void testPropertiesCheckUniqueIndexDubKeysUpdate() {
+ final ODocument docOne = new ODocument("classIndexManagerTestClass");
+ final ODocument docTwo = new ODocument("classIndexManagerTestClass");
+
+ boolean exceptionThrown = false;
+ docOne.field("prop1", "a");
+ docOne.save();
+
+ docTwo.field("prop1", "b");
+ docTwo.save();
+
+ try {
+ docTwo.field("prop1", "a");
+ docTwo.save();
+ } catch (OIndexException e) {
+ exceptionThrown = true;
+ }
+ Assert.assertTrue(exceptionThrown);
+ }
+
+ public void testPropertiesCheckUniqueIndexDubKeyIsNullUpdate() {
+ final ODocument docOne = new ODocument("classIndexManagerTestClass");
+ final ODocument docTwo = new ODocument("classIndexManagerTestClass");
+
+ docOne.field("prop1", "a");
+ docOne.save();
+
+ docTwo.field("prop1", "b");
+ docTwo.save();
+
+ docTwo.field("prop1", (String) null);
+ docTwo.save();
+ }
+
+ public void testPropertiesCheckUniqueIndexDubKeyIsNullUpdateInTX() {
+ final ODocument docOne = new ODocument("classIndexManagerTestClass");
+ final ODocument docTwo = new ODocument("classIndexManagerTestClass");
+
+ database.begin();
+ docOne.field("prop1", "a");
+ docOne.save();
+
+ docTwo.field("prop1", "b");
+ docTwo.save();
+
+ docTwo.field("prop1", (String) null);
+ docTwo.save();
+ database.commit();
+ }
+
+ public void testPropertiesCheckNonUniqueIndexDubKeys() {
+ final ODocument docOne = new ODocument("classIndexManagerTestClass");
+ docOne.field("prop2", 1);
+ docOne.save();
+
+ final ODocument docTwo = new ODocument("classIndexManagerTestClass");
+ docTwo.field("prop2", 1);
+ docTwo.save();
+ }
+
+ public void testPropertiesCheckUniqueNullKeys() {
+ final ODocument docOne = new ODocument("classIndexManagerTestClass");
+ docOne.field("prop3", "a");
+ docOne.save();
+
+ final ODocument docTwo = new ODocument("classIndexManagerTestClass");
+ docTwo.field("prop3", "a");
+ docTwo.save();
+ }
+
+ public void testCreateDocumentWithoutClass() {
+ final Collection<? extends OIndex<?>> beforeIndexes = database.getMetadata().getIndexManager().getIndexes();
+ final Map<String, Long> indexSizeMap = new HashMap<String, Long>();
+
+ for (final OIndex<?> index : beforeIndexes)
+ indexSizeMap.put(index.getName(), index.getSize());
+
+ final ODocument docOne = new ODocument();
+ docOne.field("prop1", "a");
+ docOne.save();
+
+ final ODocument docTwo = new ODocument();
+ docTwo.field("prop1", "a");
+ docTwo.save();
+
+ final Collection<? extends OIndex<?>> afterIndexes = database.getMetadata().getIndexManager().getIndexes();
+ for (final OIndex<?> index : afterIndexes)
+ Assert.assertEquals(index.getSize(), indexSizeMap.get(index.getName()).longValue());
+ }
+
+ public void testUpdateDocumentWithoutClass() {
+ final Collection<? extends OIndex<?>> beforeIndexes = database.getMetadata().getIndexManager().getIndexes();
+ final Map<String, Long> indexSizeMap = new HashMap<String, Long>();
+
+ for (final OIndex<?> index : beforeIndexes)
+ indexSizeMap.put(index.getName(), index.getSize());
+
+ final ODocument docOne = new ODocument();
+ docOne.field("prop1", "a");
+ docOne.save();
+
+ final ODocument docTwo = new ODocument();
+ docTwo.field("prop1", "b");
+ docTwo.save();
- docOne.field("prop1", "a");
- docOne.save();
+ docOne.field("prop1", "a");
+ docOne.save();
- final Collection<? extends OIndex<?>> afterIndexes = database.getMetadata().getIndexManager().getIndexes();
- for (final OIndex<?> index : afterIndexes)
- Assert.assertEquals(index.getSize(), indexSizeMap.get(index.getName()).longValue());
- }
+ final Collection<? extends OIndex<?>> afterIndexes = database.getMetadata().getIndexManager().getIndexes();
+ for (final OIndex<?> index : afterIndexes)
+ Assert.assertEquals(index.getSize(), indexSizeMap.get(index.getName()).longValue());
+ }
- public void testDeleteDocumentWithoutClass() {
- final ODocument docOne = new ODocument();
- docOne.field("prop1", "a");
- docOne.save();
+ public void testDeleteDocumentWithoutClass() {
+ final ODocument docOne = new ODocument();
+ docOne.field("prop1", "a");
+ docOne.save();
- docOne.delete();
- }
+ docOne.delete();
+ }
- public void testDeleteModifiedDocumentWithoutClass() {
- final ODocument docOne = new ODocument();
- docOne.field("prop1", "a");
- docOne.save();
+ public void testDeleteModifiedDocumentWithoutClass() {
+ final ODocument docOne = new ODocument();
+ docOne.field("prop1", "a");
+ docOne.save();
- docOne.field("prop1", "b");
+ docOne.field("prop1", "b");
- docOne.delete();
- }
-
- public void testDocumentUpdateWithoutDirtyFields() {
- final ODocument docOne = new ODocument("classIndexManagerTestClass");
- docOne.field("prop1", "a");
- docOne.save();
+ docOne.delete();
+ }
+
+ public void testDocumentUpdateWithoutDirtyFields() {
+ final ODocument docOne = new ODocument("classIndexManagerTestClass");
+ docOne.field("prop1", "a");
+ docOne.save();
- docOne.setDirty();
- docOne.save();
- }
+ docOne.setDirty();
+ docOne.save();
+ }
- public void testCreateDocumentIndexRecordAdded() {
- final ODocument doc = new ODocument("classIndexManagerTestClass");
- doc.field("prop0", "x");
- doc.field("prop1", "a");
- doc.field("prop2", 1);
-
- doc.save();
+ public void testCreateDocumentIndexRecordAdded() {
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
+ doc.field("prop0", "x");
+ doc.field("prop1", "a");
+ doc.field("prop2", 1);
+
+ doc.save();
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
- final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
- final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
- Assert.assertNotNull(propOneIndex.get("a"));
- Assert.assertEquals(propOneIndex.getSize(), 1);
+ final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
+ Assert.assertNotNull(propOneIndex.get("a"));
+ Assert.assertEquals(propOneIndex.getSize(), 1);
- final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
+ final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
- final OIndexDefinition compositeIndexDefinition = compositeIndex.getDefinition();
- Assert.assertNotNull(compositeIndex.get(compositeIndexDefinition.createValue("a", 1)));
- Assert.assertEquals(compositeIndex.getSize(), 1);
+ final OIndexDefinition compositeIndexDefinition = compositeIndex.getDefinition();
+ Assert.assertNotNull(compositeIndex.get(compositeIndexDefinition.createValue("a", 1)));
+ Assert.assertEquals(compositeIndex.getSize(), 1);
- final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
- Assert.assertNotNull(propZeroIndex.get("x"));
- Assert.assertEquals(propZeroIndex.getSize(), 1);
- }
+ final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
+ Assert.assertNotNull(propZeroIndex.get("x"));
+ Assert.assertEquals(propZeroIndex.getSize(), 1);
+ }
- public void testUpdateDocumentIndexRecordRemoved() {
- final ODocument doc = new ODocument("classIndexManagerTestClass");
- doc.field("prop0", "x");
- doc.field("prop1", "a");
- doc.field("prop2", 1);
+ public void testUpdateDocumentIndexRecordRemoved() {
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
+ doc.field("prop0", "x");
+ doc.field("prop1", "a");
+ doc.field("prop2", 1);
- doc.save();
+ doc.save();
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
- final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
- final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
+ final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
+ final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
+ final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 1);
- Assert.assertEquals(propZeroIndex.getSize(), 1);
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 1);
+ Assert.assertEquals(propZeroIndex.getSize(), 1);
- doc.removeField("prop2");
- doc.removeField("prop0");
- doc.save();
+ doc.removeField("prop2");
+ doc.removeField("prop0");
+ doc.save();
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 0);
- Assert.assertEquals(propZeroIndex.getSize(), 0);
- }
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 0);
+ Assert.assertEquals(propZeroIndex.getSize(), 0);
+ }
- public void testUpdateDocumentNullKeyIndexRecordRemoved() {
- final ODocument doc = new ODocument("classIndexManagerTestClass");
+ public void testUpdateDocumentNullKeyIndexRecordRemoved() {
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
- doc.field("prop0", "x");
- doc.field("prop1", "a");
- doc.field("prop2", 1);
+ doc.field("prop0", "x");
+ doc.field("prop1", "a");
+ doc.field("prop2", 1);
- doc.save();
+ doc.save();
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
- final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
- final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
+ final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
+ final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
+ final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 1);
- Assert.assertEquals(propZeroIndex.getSize(), 1);
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 1);
+ Assert.assertEquals(propZeroIndex.getSize(), 1);
- doc.field("prop2", (Object) null);
- doc.field("prop0", (Object) null);
- doc.save();
+ doc.field("prop2", (Object) null);
+ doc.field("prop0", (Object) null);
+ doc.save();
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 0);
- Assert.assertEquals(propZeroIndex.getSize(), 0);
- }
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 0);
+ Assert.assertEquals(propZeroIndex.getSize(), 0);
+ }
- public void testUpdateDocumentIndexRecordUpdated() {
- final ODocument doc = new ODocument("classIndexManagerTestClass");
- doc.field("prop0", "x");
- doc.field("prop1", "a");
- doc.field("prop2", 1);
+ public void testUpdateDocumentIndexRecordUpdated() {
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
+ doc.field("prop0", "x");
+ doc.field("prop1", "a");
+ doc.field("prop2", 1);
- doc.save();
+ doc.save();
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
- final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
- final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
- final OIndexDefinition compositeIndexDefinition = compositeIndex.getDefinition();
+ final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
+ final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
+ final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
+ final OIndexDefinition compositeIndexDefinition = compositeIndex.getDefinition();
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 1);
- Assert.assertEquals(propZeroIndex.getSize(), 1);
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 1);
+ Assert.assertEquals(propZeroIndex.getSize(), 1);
- doc.field("prop2", 2);
- doc.field("prop0", "y");
- doc.save();
+ doc.field("prop2", 2);
+ doc.field("prop0", "y");
+ doc.save();
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 1);
- Assert.assertEquals(propZeroIndex.getSize(), 1);
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 1);
+ Assert.assertEquals(propZeroIndex.getSize(), 1);
- Assert.assertNotNull(propZeroIndex.get("y"));
- Assert.assertNotNull(propOneIndex.get("a"));
- Assert.assertNotNull(compositeIndex.get(compositeIndexDefinition.createValue("a", 2)));
- }
+ Assert.assertNotNull(propZeroIndex.get("y"));
+ Assert.assertNotNull(propOneIndex.get("a"));
+ Assert.assertNotNull(compositeIndex.get(compositeIndexDefinition.createValue("a", 2)));
+ }
- public void testUpdateDocumentIndexRecordUpdatedFromNullField() {
- final ODocument doc = new ODocument("classIndexManagerTestClass");
- doc.field("prop1", "a");
- doc.field("prop2", (Object) null);
+ public void testUpdateDocumentIndexRecordUpdatedFromNullField() {
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
+ doc.field("prop1", "a");
+ doc.field("prop2", (Object) null);
- doc.save();
+ doc.save();
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
- final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
- final OIndexDefinition compositeIndexDefinition = compositeIndex.getDefinition();
+ final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
+ final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
+ final OIndexDefinition compositeIndexDefinition = compositeIndex.getDefinition();
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 0);
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 0);
- doc.field("prop2", 2);
- doc.save();
+ doc.field("prop2", 2);
+ doc.save();
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 1);
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 1);
- Assert.assertNotNull(propOneIndex.get("a"));
- Assert.assertNotNull(compositeIndex.get(compositeIndexDefinition.createValue("a", 2)));
- }
+ Assert.assertNotNull(propOneIndex.get("a"));
+ Assert.assertNotNull(compositeIndex.get(compositeIndexDefinition.createValue("a", 2)));
+ }
- public void testListUpdate() {
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ public void testListUpdate() {
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- final OIndex<?> propFourIndex = oClass.getClassIndex("classIndexManagerTestClass.prop4");
+ final OIndex<?> propFourIndex = oClass.getClassIndex("classIndexManagerTestClass.prop4");
- Assert.assertEquals(propFourIndex.getSize(), 0);
+ Assert.assertEquals(propFourIndex.getSize(), 0);
- final ODocument doc = new ODocument("classIndexManagerTestClass");
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
- final List<String> listProperty = new ArrayList<String>();
- listProperty.add("value1");
- listProperty.add("value2");
+ final List<String> listProperty = new ArrayList<String>();
+ listProperty.add("value1");
+ listProperty.add("value2");
- doc.field("prop4", listProperty);
- doc.save();
+ doc.field("prop4", listProperty);
+ doc.save();
- Assert.assertEquals(propFourIndex.getSize(), 2);
- Assert.assertNotNull(propFourIndex.get("value1"));
- Assert.assertNotNull(propFourIndex.get("value2"));
+ Assert.assertEquals(propFourIndex.getSize(), 2);
+ Assert.assertNotNull(propFourIndex.get("value1"));
+ Assert.assertNotNull(propFourIndex.get("value2"));
- List<String> trackedList = doc.field("prop4");
- trackedList.set(0, "value3");
+ List<String> trackedList = doc.field("prop4");
+ trackedList.set(0, "value3");
- trackedList.add("value4");
- trackedList.add("value4");
- trackedList.add("value4");
- trackedList.remove("value4");
- trackedList.remove("value2");
- trackedList.add("value5");
+ trackedList.add("value4");
+ trackedList.add("value4");
+ trackedList.add("value4");
+ trackedList.remove("value4");
+ trackedList.remove("value2");
+ trackedList.add("value5");
- doc.save();
+ doc.save();
- Assert.assertEquals(propFourIndex.getSize(), 3);
- Assert.assertNotNull(propFourIndex.get("value3"));
- Assert.assertNotNull(propFourIndex.get("value4"));
- Assert.assertNotNull(propFourIndex.get("value5"));
- }
+ Assert.assertEquals(propFourIndex.getSize(), 3);
+ Assert.assertNotNull(propFourIndex.get("value3"));
+ Assert.assertNotNull(propFourIndex.get("value4"));
+ Assert.assertNotNull(propFourIndex.get("value5"));
+ }
+ public void testMapUpdate() {
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- public void testMapUpdate() {
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ final OIndex<?> propFiveIndexKey = oClass.getClassIndex("classIndexManagerTestIndexByKey");
+ final OIndex<?> propFiveIndexValue = oClass.getClassIndex("classIndexManagerTestIndexByValue");
- final OIndex<?> propFiveIndexKey = oClass.getClassIndex("classIndexManagerTestIndexByKey");
- final OIndex<?> propFiveIndexValue = oClass.getClassIndex("classIndexManagerTestIndexByValue");
+ Assert.assertEquals(propFiveIndexKey.getSize(), 0);
- Assert.assertEquals(propFiveIndexKey.getSize(), 0);
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
- final ODocument doc = new ODocument("classIndexManagerTestClass");
+ final Map<String, String> mapProperty = new HashMap<String, String>();
+ mapProperty.put("key1", "value1");
+ mapProperty.put("key2", "value2");
- final Map<String, String> mapProperty = new HashMap<String, String>();
- mapProperty.put("key1", "value1");
- mapProperty.put("key2", "value2");
+ doc.field("prop5", mapProperty);
+ doc.save();
- doc.field("prop5", mapProperty);
- doc.save();
+ Assert.assertEquals(propFiveIndexKey.getSize(), 2);
+ Assert.assertNotNull(propFiveIndexKey.get("key1"));
+ Assert.assertNotNull(propFiveIndexKey.get("key2"));
- Assert.assertEquals(propFiveIndexKey.getSize(), 2);
- Assert.assertNotNull(propFiveIndexKey.get("key1"));
- Assert.assertNotNull(propFiveIndexKey.get("key2"));
+ Map<String, String> trackedMap = doc.field("prop5");
+ trackedMap.put("key3", "value3");
+ trackedMap.put("key4", "value4");
+ trackedMap.remove("key1");
+ trackedMap.put("key1", "value5");
+ trackedMap.remove("key2");
+ trackedMap.put("key6", "value6");
+ trackedMap.put("key7", "value6");
+ trackedMap.put("key8", "value6");
+ trackedMap.put("key4", "value7");
- Map<String, String> trackedMap = doc.field("prop5");
- trackedMap.put("key3", "value3");
- trackedMap.put("key4", "value4");
- trackedMap.remove("key1");
- trackedMap.put("key1", "value5");
- trackedMap.remove("key2");
- trackedMap.put("key6", "value6");
- trackedMap.put("key7", "value6");
- trackedMap.put("key8", "value6");
- trackedMap.put("key4", "value7");
+ trackedMap.remove("key8");
- trackedMap.remove("key8");
+ doc.save();
+ Assert.assertEquals(propFiveIndexKey.getSize(), 5);
+ Assert.assertNotNull(propFiveIndexKey.get("key1"));
+ Assert.assertNotNull(propFiveIndexKey.get("key3"));
+ Assert.assertNotNull(propFiveIndexKey.get("key4"));
+ Assert.assertNotNull(propFiveIndexKey.get("key6"));
+ Assert.assertNotNull(propFiveIndexKey.get("key7"));
- doc.save();
+ Assert.assertEquals(propFiveIndexValue.getSize(), 4);
+ Assert.assertNotNull(propFiveIndexValue.get("value5"));
+ Assert.assertNotNull(propFiveIndexValue.get("value3"));
+ Assert.assertNotNull(propFiveIndexValue.get("value7"));
+ Assert.assertNotNull(propFiveIndexValue.get("value6"));
- Assert.assertEquals(propFiveIndexKey.getSize(), 5);
- Assert.assertNotNull(propFiveIndexKey.get("key1"));
- Assert.assertNotNull(propFiveIndexKey.get("key3"));
- Assert.assertNotNull(propFiveIndexKey.get("key4"));
- Assert.assertNotNull(propFiveIndexKey.get("key6"));
- Assert.assertNotNull(propFiveIndexKey.get("key7"));
+ }
- Assert.assertEquals(propFiveIndexValue.getSize(), 4);
- Assert.assertNotNull(propFiveIndexValue.get("value5"));
- Assert.assertNotNull(propFiveIndexValue.get("value3"));
- Assert.assertNotNull(propFiveIndexValue.get("value7"));
- Assert.assertNotNull(propFiveIndexValue.get("value6"));
+ public void testSetUpdate() {
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- }
+ final OIndex<?> propSixIndex = oClass.getClassIndex("classIndexManagerTestClass.prop6");
- public void testSetUpdate() {
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ Assert.assertEquals(propSixIndex.getSize(), 0);
- final OIndex<?> propSixIndex = oClass.getClassIndex("classIndexManagerTestClass.prop6");
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
- Assert.assertEquals(propSixIndex.getSize(), 0);
+ final Set<String> setProperty = new HashSet<String>();
+ setProperty.add("value1");
+ setProperty.add("value2");
- final ODocument doc = new ODocument("classIndexManagerTestClass");
+ doc.field("prop6", setProperty);
+ doc.save();
- final Set<String> setProperty = new HashSet<String>();
- setProperty.add("value1");
- setProperty.add("value2");
+ Assert.assertEquals(propSixIndex.getSize(), 2);
+ Assert.assertNotNull(propSixIndex.get("value1"));
+ Assert.assertNotNull(propSixIndex.get("value2"));
- doc.field("prop6", setProperty);
- doc.save();
+ Set<String> trackedSet = doc.field("prop6");
- Assert.assertEquals(propSixIndex.getSize(), 2);
- Assert.assertNotNull(propSixIndex.get("value1"));
- Assert.assertNotNull(propSixIndex.get("value2"));
+ trackedSet.add("value4");
+ trackedSet.add("value4");
+ trackedSet.add("value4");
+ trackedSet.remove("value4");
+ trackedSet.remove("value2");
+ trackedSet.add("value5");
- Set<String> trackedSet = doc.field("prop6");
+ doc.save();
- trackedSet.add("value4");
- trackedSet.add("value4");
- trackedSet.add("value4");
- trackedSet.remove("value4");
- trackedSet.remove("value2");
- trackedSet.add("value5");
+ Assert.assertEquals(propSixIndex.getSize(), 2);
+ Assert.assertNotNull(propSixIndex.get("value1"));
+ Assert.assertNotNull(propSixIndex.get("value5"));
+ }
- doc.save();
+ public void testListDelete() {
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- Assert.assertEquals(propSixIndex.getSize(), 2);
- Assert.assertNotNull(propSixIndex.get("value1"));
- Assert.assertNotNull(propSixIndex.get("value5"));
- }
+ final OIndex<?> propFourIndex = oClass.getClassIndex("classIndexManagerTestClass.prop4");
- public void testListDelete() {
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ Assert.assertEquals(propFourIndex.getSize(), 0);
- final OIndex<?> propFourIndex = oClass.getClassIndex("classIndexManagerTestClass.prop4");
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
- Assert.assertEquals(propFourIndex.getSize(), 0);
+ final List<String> listProperty = new ArrayList<String>();
+ listProperty.add("value1");
+ listProperty.add("value2");
- final ODocument doc = new ODocument("classIndexManagerTestClass");
+ doc.field("prop4", listProperty);
+ doc.save();
- final List<String> listProperty = new ArrayList<String>();
- listProperty.add("value1");
- listProperty.add("value2");
+ Assert.assertEquals(propFourIndex.getSize(), 2);
+ Assert.assertNotNull(propFourIndex.get("value1"));
+ Assert.assertNotNull(propFourIndex.get("value2"));
- doc.field("prop4", listProperty);
- doc.save();
+ List<String> trackedList = doc.field("prop4");
+ trackedList.set(0, "value3");
- Assert.assertEquals(propFourIndex.getSize(), 2);
- Assert.assertNotNull(propFourIndex.get("value1"));
- Assert.assertNotNull(propFourIndex.get("value2"));
+ trackedList.add("value4");
+ trackedList.add("value4");
+ trackedList.add("value4");
+ trackedList.remove("value4");
+ trackedList.remove("value2");
+ trackedList.add("value5");
- List<String> trackedList = doc.field("prop4");
- trackedList.set(0, "value3");
+ doc.save();
- trackedList.add("value4");
- trackedList.add("value4");
- trackedList.add("value4");
- trackedList.remove("value4");
- trackedList.remove("value2");
- trackedList.add("value5");
+ Assert.assertEquals(propFourIndex.getSize(), 3);
+ Assert.assertNotNull(propFourIndex.get("value3"));
+ Assert.assertNotNull(propFourIndex.get("value4"));
+ Assert.assertNotNull(propFourIndex.get("value5"));
- doc.save();
+ trackedList = doc.field("prop4");
+ trackedList.remove("value3");
+ trackedList.remove("value4");
+ trackedList.add("value8");
- Assert.assertEquals(propFourIndex.getSize(), 3);
- Assert.assertNotNull(propFourIndex.get("value3"));
- Assert.assertNotNull(propFourIndex.get("value4"));
- Assert.assertNotNull(propFourIndex.get("value5"));
+ doc.delete();
- trackedList = doc.field("prop4");
- trackedList.remove("value3");
- trackedList.remove("value4");
- trackedList.add("value8");
+ Assert.assertEquals(propFourIndex.getSize(), 0);
+ }
- doc.delete();
+ public void testMapDelete() {
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- Assert.assertEquals(propFourIndex.getSize(), 0);
- }
+ final OIndex<?> propFiveIndexKey = oClass.getClassIndex("classIndexManagerTestIndexByKey");
+ final OIndex<?> propFiveIndexValue = oClass.getClassIndex("classIndexManagerTestIndexByValue");
+ Assert.assertEquals(propFiveIndexKey.getSize(), 0);
- public void testMapDelete() {
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
- final OIndex<?> propFiveIndexKey = oClass.getClassIndex("classIndexManagerTestIndexByKey");
- final OIndex<?> propFiveIndexValue = oClass.getClassIndex("classIndexManagerTestIndexByValue");
+ final Map<String, String> mapProperty = new HashMap<String, String>();
+ mapProperty.put("key1", "value1");
+ mapProperty.put("key2", "value2");
- Assert.assertEquals(propFiveIndexKey.getSize(), 0);
+ doc.field("prop5", mapProperty);
+ doc.save();
- final ODocument doc = new ODocument("classIndexManagerTestClass");
+ Assert.assertEquals(propFiveIndexKey.getSize(), 2);
+ Assert.assertNotNull(propFiveIndexKey.get("key1"));
+ Assert.assertNotNull(propFiveIndexKey.get("key2"));
- final Map<String, String> mapProperty = new HashMap<String, String>();
- mapProperty.put("key1", "value1");
- mapProperty.put("key2", "value2");
+ Map<String, String> trackedMap = doc.field("prop5");
+ trackedMap.put("key3", "value3");
+ trackedMap.put("key4", "value4");
+ trackedMap.remove("key1");
+ trackedMap.put("key1", "value5");
+ trackedMap.remove("key2");
+ trackedMap.put("key6", "value6");
+ trackedMap.put("key7", "value6");
+ trackedMap.put("key8", "value6");
+ trackedMap.put("key4", "value7");
- doc.field("prop5", mapProperty);
- doc.save();
+ trackedMap.remove("key8");
- Assert.assertEquals(propFiveIndexKey.getSize(), 2);
- Assert.assertNotNull(propFiveIndexKey.get("key1"));
- Assert.assertNotNull(propFiveIndexKey.get("key2"));
+ doc.save();
- Map<String, String> trackedMap = doc.field("prop5");
- trackedMap.put("key3", "value3");
- trackedMap.put("key4", "value4");
- trackedMap.remove("key1");
- trackedMap.put("key1", "value5");
- trackedMap.remove("key2");
- trackedMap.put("key6", "value6");
- trackedMap.put("key7", "value6");
- trackedMap.put("key8", "value6");
- trackedMap.put("key4", "value7");
+ Assert.assertEquals(propFiveIndexKey.getSize(), 5);
+ Assert.assertNotNull(propFiveIndexKey.get("key1"));
+ Assert.assertNotNull(propFiveIndexKey.get("key3"));
+ Assert.assertNotNull(propFiveIndexKey.get("key4"));
+ Assert.assertNotNull(propFiveIndexKey.get("key6"));
+ Assert.assertNotNull(propFiveIndexKey.get("key7"));
- trackedMap.remove("key8");
+ Assert.assertEquals(propFiveIndexValue.getSize(), 4);
+ Assert.assertNotNull(propFiveIndexValue.get("value5"));
+ Assert.assertNotNull(propFiveIndexValue.get("value3"));
+ Assert.assertNotNull(propFiveIndexValue.get("value7"));
+ Assert.assertNotNull(propFiveIndexValue.get("value6"));
+ trackedMap = doc.field("prop5");
- doc.save();
+ trackedMap.remove("key1");
+ trackedMap.remove("key3");
+ trackedMap.remove("key4");
+ trackedMap.put("key6", "value10");
+ trackedMap.put("key11", "value11");
- Assert.assertEquals(propFiveIndexKey.getSize(), 5);
- Assert.assertNotNull(propFiveIndexKey.get("key1"));
- Assert.assertNotNull(propFiveIndexKey.get("key3"));
- Assert.assertNotNull(propFiveIndexKey.get("key4"));
- Assert.assertNotNull(propFiveIndexKey.get("key6"));
- Assert.assertNotNull(propFiveIndexKey.get("key7"));
+ doc.delete();
- Assert.assertEquals(propFiveIndexValue.getSize(), 4);
- Assert.assertNotNull(propFiveIndexValue.get("value5"));
- Assert.assertNotNull(propFiveIndexValue.get("value3"));
- Assert.assertNotNull(propFiveIndexValue.get("value7"));
- Assert.assertNotNull(propFiveIndexValue.get("value6"));
+ Assert.assertEquals(propFiveIndexKey.getSize(), 0);
+ Assert.assertEquals(propFiveIndexValue.getSize(), 0);
+ }
- trackedMap = doc.field("prop5");
+ public void testSetDelete() {
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- trackedMap.remove("key1");
- trackedMap.remove("key3");
- trackedMap.remove("key4");
- trackedMap.put("key6", "value10");
- trackedMap.put("key11", "value11");
+ final OIndex<?> propSixIndex = oClass.getClassIndex("classIndexManagerTestClass.prop6");
- doc.delete();
+ Assert.assertEquals(propSixIndex.getSize(), 0);
- Assert.assertEquals(propFiveIndexKey.getSize(), 0);
- Assert.assertEquals(propFiveIndexValue.getSize(), 0);
- }
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
- public void testSetDelete() {
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ final Set<String> setProperty = new HashSet<String>();
+ setProperty.add("value1");
+ setProperty.add("value2");
- final OIndex<?> propSixIndex = oClass.getClassIndex("classIndexManagerTestClass.prop6");
+ doc.field("prop6", setProperty);
+ doc.save();
- Assert.assertEquals(propSixIndex.getSize(), 0);
+ Assert.assertEquals(propSixIndex.getSize(), 2);
+ Assert.assertNotNull(propSixIndex.get("value1"));
+ Assert.assertNotNull(propSixIndex.get("value2"));
- final ODocument doc = new ODocument("classIndexManagerTestClass");
+ Set<String> trackedSet = doc.field("prop6");
- final Set<String> setProperty = new HashSet<String>();
- setProperty.add("value1");
- setProperty.add("value2");
+ trackedSet.add("value4");
+ trackedSet.add("value4");
+ trackedSet.add("value4");
+ trackedSet.remove("value4");
+ trackedSet.remove("value2");
+ trackedSet.add("value5");
- doc.field("prop6", setProperty);
- doc.save();
+ doc.save();
- Assert.assertEquals(propSixIndex.getSize(), 2);
- Assert.assertNotNull(propSixIndex.get("value1"));
- Assert.assertNotNull(propSixIndex.get("value2"));
+ Assert.assertEquals(propSixIndex.getSize(), 2);
+ Assert.assertNotNull(propSixIndex.get("value1"));
+ Assert.assertNotNull(propSixIndex.get("value5"));
- Set<String> trackedSet = doc.field("prop6");
+ trackedSet = doc.field("prop6");
+ trackedSet.remove("value1");
+ trackedSet.add("value6");
- trackedSet.add("value4");
- trackedSet.add("value4");
- trackedSet.add("value4");
- trackedSet.remove("value4");
- trackedSet.remove("value2");
- trackedSet.add("value5");
+ doc.delete();
- doc.save();
+ Assert.assertEquals(propSixIndex.getSize(), 0);
+ }
- Assert.assertEquals(propSixIndex.getSize(), 2);
- Assert.assertNotNull(propSixIndex.get("value1"));
- Assert.assertNotNull(propSixIndex.get("value5"));
+ public void testDeleteDocumentIndexRecordDeleted() {
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
+ doc.field("prop0", "x");
+ doc.field("prop1", "a");
+ doc.field("prop2", 1);
- trackedSet = doc.field("prop6");
- trackedSet.remove("value1");
- trackedSet.add("value6");
+ doc.save();
- doc.delete();
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- Assert.assertEquals(propSixIndex.getSize(), 0);
- }
+ final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
+ final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
+ final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
+ Assert.assertEquals(propZeroIndex.getSize(), 1);
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 1);
- public void testDeleteDocumentIndexRecordDeleted() {
- final ODocument doc = new ODocument("classIndexManagerTestClass");
- doc.field("prop0", "x");
- doc.field("prop1", "a");
- doc.field("prop2", 1);
+ doc.delete();
- doc.save();
+ Assert.assertEquals(propZeroIndex.getSize(), 0);
+ Assert.assertEquals(propOneIndex.getSize(), 0);
+ Assert.assertEquals(compositeIndex.getSize(), 0);
+ }
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ public void testDeleteUpdatedDocumentIndexRecordDeleted() {
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
+ doc.field("prop0", "x");
+ doc.field("prop1", "a");
+ doc.field("prop2", 1);
- final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
- final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
- final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
+ doc.save();
- Assert.assertEquals(propZeroIndex.getSize(), 1);
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 1);
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- doc.delete();
+ final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
+ final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
- Assert.assertEquals(propZeroIndex.getSize(), 0);
- Assert.assertEquals(propOneIndex.getSize(), 0);
- Assert.assertEquals(compositeIndex.getSize(), 0);
- }
+ final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
+ Assert.assertEquals(propZeroIndex.getSize(), 1);
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 1);
- public void testDeleteUpdatedDocumentIndexRecordDeleted() {
- final ODocument doc = new ODocument("classIndexManagerTestClass");
- doc.field("prop0", "x");
- doc.field("prop1", "a");
- doc.field("prop2", 1);
+ doc.field("prop2", 2);
+ doc.field("prop0", "y");
- doc.save();
+ doc.delete();
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oSuperClass = schema.getClass("classIndexManagerTestSuperClass");
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ Assert.assertEquals(propZeroIndex.getSize(), 0);
+ Assert.assertEquals(propOneIndex.getSize(), 0);
+ Assert.assertEquals(compositeIndex.getSize(), 0);
+ }
- final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
- final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
+ public void testDeleteUpdatedDocumentNullFieldIndexRecordDeleted() {
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
+ doc.field("prop1", "a");
+ doc.field("prop2", (Object) null);
- final OIndex<?> propZeroIndex = oSuperClass.getClassIndex("classIndexManagerTestSuperClass.prop0");
- Assert.assertEquals(propZeroIndex.getSize(), 1);
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 1);
+ doc.save();
- doc.field("prop2", 2);
- doc.field("prop0", "y");
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- doc.delete();
+ final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
+ final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
- Assert.assertEquals(propZeroIndex.getSize(), 0);
- Assert.assertEquals(propOneIndex.getSize(), 0);
- Assert.assertEquals(compositeIndex.getSize(), 0);
- }
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 0);
- public void testDeleteUpdatedDocumentNullFieldIndexRecordDeleted() {
- final ODocument doc = new ODocument("classIndexManagerTestClass");
- doc.field("prop1", "a");
- doc.field("prop2", (Object) null);
+ doc.delete();
- doc.save();
+ Assert.assertEquals(propOneIndex.getSize(), 0);
+ Assert.assertEquals(compositeIndex.getSize(), 0);
+ }
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ public void testDeleteUpdatedDocumentOrigNullFieldIndexRecordDeleted() {
+ final ODocument doc = new ODocument("classIndexManagerTestClass");
+ doc.field("prop1", "a");
+ doc.field("prop2", (Object) null);
- final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
- final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
+ doc.save();
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 0);
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- doc.delete();
+ final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
+ final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
- Assert.assertEquals(propOneIndex.getSize(), 0);
- Assert.assertEquals(compositeIndex.getSize(), 0);
- }
+ Assert.assertEquals(propOneIndex.getSize(), 1);
+ Assert.assertEquals(compositeIndex.getSize(), 0);
- public void testDeleteUpdatedDocumentOrigNullFieldIndexRecordDeleted() {
- final ODocument doc = new ODocument("classIndexManagerTestClass");
- doc.field("prop1", "a");
- doc.field("prop2", (Object) null);
+ doc.field("prop2", 2);
- doc.save();
+ doc.delete();
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ Assert.assertEquals(propOneIndex.getSize(), 0);
+ Assert.assertEquals(compositeIndex.getSize(), 0);
+ }
- final OIndex<?> propOneIndex = oClass.getClassIndex("classIndexManagerTestClass.prop1");
- final OIndex<?> compositeIndex = oClass.getClassIndex("classIndexManagerComposite");
+ public void testNoClassIndexesUpdate() {
+ final ODocument doc = new ODocument("classIndexManagerTestClassTwo");
+ doc.field("prop1", "a");
+ doc.save();
- Assert.assertEquals(propOneIndex.getSize(), 1);
- Assert.assertEquals(compositeIndex.getSize(), 0);
+ doc.field("prop1", "b");
+ doc.save();
- doc.field("prop2", 2);
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.getClass("classIndexManagerTestClass");
- doc.delete();
+ final Collection<OIndex<?>> indexes = oClass.getIndexes();
+ for (final OIndex<?> index : indexes) {
+ Assert.assertEquals(index.getSize(), 0);
+ }
+ }
- Assert.assertEquals(propOneIndex.getSize(), 0);
- Assert.assertEquals(compositeIndex.getSize(), 0);
- }
+ public void testNoClassIndexesDelete() {
+ final ODocument doc = new ODocument("classIndexManagerTestClassTwo");
+ doc.field("prop1", "a");
+ doc.save();
- public void testNoClassIndexesUpdate() {
- final ODocument doc = new ODocument("classIndexManagerTestClassTwo");
- doc.field("prop1", "a");
- doc.save();
+ doc.delete();
+ }
- doc.field("prop1", "b");
- doc.save();
+ public void testCollectionCompositeCreation() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.getClass("classIndexManagerTestClass");
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
- final Collection<OIndex<?>> indexes = oClass.getIndexes();
- for (final OIndex<?> index : indexes) {
- Assert.assertEquals(index.getSize(), 0);
- }
- }
+ doc.save();
- public void testNoClassIndexesDelete() {
- final ODocument doc = new ODocument("classIndexManagerTestClassTwo");
- doc.field("prop1", "a");
- doc.save();
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ Assert.assertEquals(index.get(new OCompositeKey("test1", 1)), doc.getIdentity());
+ Assert.assertEquals(index.get(new OCompositeKey("test1", 2)), doc.getIdentity());
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeNullSimpleFieldCreation() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", (Object) null);
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 0);
+
+ doc.delete();
+ }
+
+ public void testCollectionCompositeNullCollectionFieldCreation() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", (Object) null);
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 0);
+
+ doc.delete();
+ }
+
+ public void testCollectionCompositeUpdateSimpleField() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.field("prop1", "test2");
+
+ doc.save();
+
+ Assert.assertEquals(index.get(new OCompositeKey("test2", 1)), doc.getIdentity());
+ Assert.assertEquals(index.get(new OCompositeKey("test2", 2)), doc.getIdentity());
+
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeUpdateCollectionWasAssigned() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.field("prop2", Arrays.asList(1, 3));
+
+ doc.save();
+
+ Assert.assertEquals(index.get(new OCompositeKey("test1", 1)), doc.getIdentity());
+ Assert.assertEquals(index.get(new OCompositeKey("test1", 3)), doc.getIdentity());
+
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeUpdateCollectionWasChanged() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ List<Integer> docList = doc.field("prop2");
+ docList.add(3);
+ docList.add(4);
+ docList.add(5);
+
+ docList.remove(0);
+
+ doc.save();
+
+ Assert.assertEquals(index.get(new OCompositeKey("test1", 2)), doc.getIdentity());
+ Assert.assertEquals(index.get(new OCompositeKey("test1", 3)), doc.getIdentity());
+ Assert.assertEquals(index.get(new OCompositeKey("test1", 4)), doc.getIdentity());
+ Assert.assertEquals(index.get(new OCompositeKey("test1", 5)), doc.getIdentity());
+
+ Assert.assertEquals(index.getSize(), 4);
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeUpdateCollectionWasChangedSimpleFieldWasAssigned() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ List<Integer> docList = doc.field("prop2");
+ docList.add(3);
+ docList.add(4);
+ docList.add(5);
+
+ docList.remove(0);
+
+ doc.field("prop1", "test2");
+
+ doc.save();
+
+ Assert.assertEquals(index.getSize(), 4);
+
+ Assert.assertEquals(index.get(new OCompositeKey("test2", 2)), doc.getIdentity());
+ Assert.assertEquals(index.get(new OCompositeKey("test2", 3)), doc.getIdentity());
+ Assert.assertEquals(index.get(new OCompositeKey("test2", 4)), doc.getIdentity());
+ Assert.assertEquals(index.get(new OCompositeKey("test2", 5)), doc.getIdentity());
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeUpdateSimpleFieldNull() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.field("prop1", (Object) null);
+
+ doc.save();
+
+ Assert.assertEquals(index.getSize(), 0);
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeUpdateCollectionWasAssignedNull() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.field("prop2", (Object) null);
+
+ doc.save();
+
+ Assert.assertEquals(index.getSize(), 0);
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeUpdateBothAssignedNull() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.field("prop2", (Object) null);
+ doc.field("prop1", (Object) null);
+
+ doc.save();
+
+ Assert.assertEquals(index.getSize(), 0);
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeUpdateCollectionWasChangedSimpleFieldWasAssignedNull() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ List<Integer> docList = doc.field("prop2");
+ docList.add(3);
+ docList.add(4);
+ docList.add(5);
+
+ docList.remove(0);
+
+ doc.field("prop1", (Object) null);
+
+ doc.save();
+
+ Assert.assertEquals(index.getSize(), 0);
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeDeleteSimpleFieldAssigend() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.field("prop1", "test2");
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeDeleteCollectionFieldAssigend() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.field("prop2", Arrays.asList(1, 3));
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeDeleteCollectionFieldChanged() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ List<Integer> docList = doc.field("prop2");
+ docList.add(3);
+ docList.add(4);
+
+ docList.remove(1);
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeDeleteBothCollectionSimpleFieldChanged() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ List<Integer> docList = doc.field("prop2");
+ docList.add(3);
+ docList.add(4);
+
+ docList.remove(1);
+
+ doc.field("prop1", "test2");
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeDeleteBothCollectionSimpleFieldAssigend() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.field("prop2", Arrays.asList(1, 3));
+ doc.field("prop1", "test2");
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeDeleteSimpleFieldNull() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.field("prop1", (Object) null);
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeDeleteCollectionFieldNull() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.field("prop2", (Object) null);
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeDeleteBothSimpleCollectionFieldNull() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ doc.field("prop2", (Object) null);
+ doc.field("prop1", (Object) null);
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
+
+ public void testCollectionCompositeDeleteCollectionFieldChangedSimpleFieldNull() {
+ final ODocument doc = new ODocument("classIndexManagerTestCompositeCollectionClass");
+
+ doc.field("prop1", "test1");
+ doc.field("prop2", Arrays.asList(1, 2));
+
+ doc.save();
+
+ final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("classIndexManagerTestIndexValueAndCollection");
+ Assert.assertEquals(index.getSize(), 2);
+
+ List<Integer> docList = doc.field("prop2");
+ docList.add(3);
+ docList.add(4);
+
+ docList.remove(1);
+
+ doc.field("prop1", (Object) null);
+
+ doc.delete();
+
+ Assert.assertEquals(index.getSize(), 0);
+ }
- doc.delete();
- }
}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/ClassIndexTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/ClassIndexTest.java
index 95ddf4feb9e..49edf9e8388 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/ClassIndexTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/ClassIndexTest.java
@@ -1,13 +1,10 @@
package com.orientechnologies.orient.test.database.auto;
-import com.orientechnologies.common.listener.OProgressListener;
-import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
-import com.orientechnologies.orient.core.index.*;
-import com.orientechnologies.orient.core.metadata.schema.OClass;
-import com.orientechnologies.orient.core.metadata.schema.OSchema;
-import com.orientechnologies.orient.core.metadata.schema.OType;
-import com.orientechnologies.orient.core.sql.OCommandSQL;
-import org.testng.annotations.*;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertFalse;
+import static org.testng.Assert.assertNull;
+import static org.testng.Assert.assertTrue;
+import static org.testng.Assert.fail;
import java.util.Arrays;
import java.util.Collection;
@@ -15,82 +12,108 @@
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
-import static org.testng.Assert.*;
-import static org.testng.Assert.assertEquals;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Parameters;
+import org.testng.annotations.Test;
-@Test(groups = {"index"})
-public class ClassIndexTest
-{
+import com.orientechnologies.common.listener.OProgressListener;
+import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
+import com.orientechnologies.orient.core.index.OCompositeIndexDefinition;
+import com.orientechnologies.orient.core.index.OIndex;
+import com.orientechnologies.orient.core.index.OIndexDefinition;
+import com.orientechnologies.orient.core.index.OIndexException;
+import com.orientechnologies.orient.core.index.OPropertyIndexDefinition;
+import com.orientechnologies.orient.core.index.OPropertyListIndexDefinition;
+import com.orientechnologies.orient.core.index.OPropertyMapIndexDefinition;
+import com.orientechnologies.orient.core.metadata.schema.OClass;
+import com.orientechnologies.orient.core.metadata.schema.OSchema;
+import com.orientechnologies.orient.core.metadata.schema.OType;
+import com.orientechnologies.orient.core.sql.OCommandSQL;
+
+@Test(groups = { "index" })
+public class ClassIndexTest {
private final ODatabaseDocumentTx database;
- private OClass oClass;
- private OClass oSuperClass;
+ private OClass oClass;
+ private OClass oSuperClass;
@Parameters(value = "url")
- public ClassIndexTest( final String iURL )
- {
- database = new ODatabaseDocumentTx( iURL );
+ public ClassIndexTest(final String iURL) {
+ database = new ODatabaseDocumentTx(iURL);
}
@BeforeClass
- public void beforeClass()
- {
- if ( database.isClosed() ) {
- database.open( "admin", "admin" );
+ public void beforeClass() {
+ if (database.isClosed()) {
+ database.open("admin", "admin");
}
final OSchema schema = database.getMetadata().getSchema();
- oClass = schema.createClass( "ClassIndexTestClass" );
- oSuperClass = schema.createClass( "ClassIndexTestSuperClass" );
+ oClass = schema.createClass("ClassIndexTestClass");
+ oSuperClass = schema.createClass("ClassIndexTestSuperClass");
+
+ oClass.createProperty("fOne", OType.INTEGER);
+ oClass.createProperty("fTwo", OType.STRING);
+ oClass.createProperty("fThree", OType.BOOLEAN);
+ oClass.createProperty("fFour", OType.INTEGER);
+
+ oClass.createProperty("fSix", OType.STRING);
+ oClass.createProperty("fSeven", OType.STRING);
+
+ oClass.createProperty("fEight", OType.INTEGER);
+ oClass.createProperty("fTen", OType.INTEGER);
+ oClass.createProperty("fEleven", OType.INTEGER);
+ oClass.createProperty("fTwelve", OType.INTEGER);
+ oClass.createProperty("fThirteen", OType.INTEGER);
+ oClass.createProperty("fFourteen", OType.INTEGER);
+ oClass.createProperty("fFifteen", OType.INTEGER);
+ oClass.createProperty("fEmbeddedMap", OType.EMBEDDEDMAP, OType.INTEGER);
+ oClass.createProperty("fEmbeddedMapWithoutLinkedType", OType.EMBEDDEDMAP);
+ oClass.createProperty("fLinkMap", OType.LINKMAP);
- oClass.createProperty( "fOne", OType.INTEGER );
- oClass.createProperty( "fTwo", OType.STRING );
- oClass.createProperty( "fThree", OType.BOOLEAN );
- oClass.createProperty( "fFour", OType.INTEGER );
+ oClass.createProperty("fLinkList", OType.LINKLIST);
+ oClass.createProperty("fEmbeddedList", OType.EMBEDDEDLIST, OType.INTEGER);
- oClass.createProperty( "fSix", OType.STRING );
- oClass.createProperty( "fSeven", OType.STRING );
- oClass.createProperty( "fEmbeddedMap", OType.EMBEDDEDMAP, OType.INTEGER );
- oClass.createProperty( "fEmbeddedMapWithoutLinkedType", OType.EMBEDDEDMAP );
- oClass.createProperty( "fLinkMap", OType.LINKMAP );
+ oClass.createProperty("fEmbeddedSet", OType.EMBEDDEDSET, OType.INTEGER);
+ oClass.createProperty("fLinkSet", OType.LINKSET);
- oSuperClass.createProperty( "fNine", OType.INTEGER );
- oClass.setSuperClass( oSuperClass );
+ oSuperClass.createProperty("fNine", OType.INTEGER);
+ oClass.setSuperClass(oSuperClass);
schema.save();
database.close();
}
@BeforeMethod
- public void beforeMethod()
- {
- database.open( "admin", "admin" );
+ public void beforeMethod() {
+ database.open("admin", "admin");
}
@AfterMethod
- public void afterMethod()
- {
+ public void afterMethod() {
database.close();
}
@AfterClass
- public void afterClass()
- {
- if ( database.isClosed() ) {
- database.open( "admin", "admin" );
+ public void afterClass() {
+ if (database.isClosed()) {
+ database.open("admin", "admin");
}
- database.command( new OCommandSQL( "delete from ClassIndexTestClass" ) ).execute();
- database.command( new OCommandSQL( "delete from ClassIndexTestSuperClass" ) ).execute();
- database.command( new OCommandSQL( "delete from ClassIndexInTest" ) ).execute();
+ database.command(new OCommandSQL("delete from ClassIndexTestClass")).execute();
+ database.command(new OCommandSQL("delete from ClassIndexTestSuperClass")).execute();
+ database.command(new OCommandSQL("delete from ClassIndexInTest")).execute();
- database.command( new OCommandSQL( "drop class ClassIndexInTest" ) ).execute();
- database.command( new OCommandSQL( "drop class ClassIndexTestClass" ) ).execute();
+ database.command(new OCommandSQL("drop class ClassIndexInTest")).execute();
+ database.command(new OCommandSQL("drop class ClassIndexTestClass")).execute();
database.getMetadata().getSchema().reload();
- database.command( new OCommandSQL( "drop class ClassIndexTestSuperClass" ) ).execute();
+ database.command(new OCommandSQL("drop class ClassIndexTestSuperClass")).execute();
database.getMetadata().getSchema().reload();
database.getMetadata().getIndexManager().reload();
@@ -99,978 +122,1153 @@ public void afterClass()
}
@Test
- public void testCreateOnePropertyIndexTest()
- {
- final OIndex result = oClass.createIndex( "ClassIndexTestPropertyOne", OClass.INDEX_TYPE.UNIQUE, "fOne" );
+ public void testCreateOnePropertyIndexTest() {
+ final OIndex result = oClass.createIndex("ClassIndexTestPropertyOne", OClass.INDEX_TYPE.UNIQUE, "fOne");
- assertEquals( result.getName(), "ClassIndexTestPropertyOne" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestPropertyOne" ).getName(), result.getName() );
- assertEquals(
- database.getMetadata().getIndexManager().getClassIndex( "ClassIndexTestClass", "ClassIndexTestPropertyOne" ).getName(),
- result.getName() );
+ assertEquals(result.getName(), "ClassIndexTestPropertyOne");
+ assertEquals(oClass.getClassIndex("ClassIndexTestPropertyOne").getName(), result.getName());
+ assertEquals(database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestPropertyOne")
+ .getName(), result.getName());
}
- @Test
- public void testCreateOnePropertyIndexInvalidName()
- {
- try {
- oClass.createIndex( "ClassIndex:TestPropertyOne", OClass.INDEX_TYPE.UNIQUE, "fOne" );
- fail();
- } catch (Exception e) {
- if(e.getCause() != null)
- e = (Exception)e.getCause();
+ @Test
+ public void testCreateOnePropertyIndexInvalidName() {
+ try {
+ oClass.createIndex("ClassIndex:TestPropertyOne", OClass.INDEX_TYPE.UNIQUE, "fOne");
+ fail();
+ } catch (Exception e) {
+ if (e.getCause() != null)
+ e = (Exception) e.getCause();
- assertTrue(e instanceof IllegalArgumentException);
- }
- }
+ assertTrue(e instanceof IllegalArgumentException);
+ }
+ }
- @Test
- public void createCompositeIndexTestWithoutListener()
- {
- final OIndex result = oClass.createIndex( "ClassIndexTestCompositeOne", OClass.INDEX_TYPE.UNIQUE, "fOne", "fTwo" );
+ @Test
+ public void createCompositeIndexTestWithoutListener() {
+ final OIndex result = oClass.createIndex("ClassIndexTestCompositeOne", OClass.INDEX_TYPE.UNIQUE, "fOne", "fTwo");
- assertEquals( result.getName(), "ClassIndexTestCompositeOne" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestCompositeOne" ).getName(), result.getName() );
- assertEquals( database.getMetadata().getIndexManager().getClassIndex( "ClassIndexTestClass", "ClassIndexTestCompositeOne" ).getName(),
- result.getName() );
+ assertEquals(result.getName(), "ClassIndexTestCompositeOne");
+ assertEquals(oClass.getClassIndex("ClassIndexTestCompositeOne").getName(), result.getName());
+ assertEquals(database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestCompositeOne")
+ .getName(), result.getName());
}
@Test
- public void createCompositeIndexTestWithListener()
- {
- final AtomicInteger atomicInteger = new AtomicInteger( 0 );
- final OProgressListener progressListener = new OProgressListener()
- {
- public void onBegin( final Object iTask, final long iTotal )
- {
+ public void createCompositeIndexTestWithListener() {
+ final AtomicInteger atomicInteger = new AtomicInteger(0);
+ final OProgressListener progressListener = new OProgressListener() {
+ public void onBegin(final Object iTask, final long iTotal) {
atomicInteger.incrementAndGet();
}
- public boolean onProgress( final Object iTask, final long iCounter, final float iPercent )
- {
+ public boolean onProgress(final Object iTask, final long iCounter, final float iPercent) {
return true;
}
- public void onCompletition( final Object iTask, final boolean iSucceed )
- {
+ public void onCompletition(final Object iTask, final boolean iSucceed) {
atomicInteger.incrementAndGet();
}
};
- final OIndex result = oClass.createIndex( "ClassIndexTestCompositeTwo", OClass.INDEX_TYPE.UNIQUE,
- progressListener, "fOne", "fTwo", "fThree" );
+ final OIndex result = oClass.createIndex("ClassIndexTestCompositeTwo", OClass.INDEX_TYPE.UNIQUE, progressListener, "fOne",
+ "fTwo", "fThree");
+
+ assertEquals(result.getName(), "ClassIndexTestCompositeTwo");
+ assertEquals(oClass.getClassIndex("ClassIndexTestCompositeTwo").getName(), result.getName());
+ assertEquals(database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestCompositeTwo")
+ .getName(), result.getName());
+ assertEquals(atomicInteger.get(), 2);
+ }
+
+ @Test
+ public void testCreateOnePropertyEmbeddedMapIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestPropertyEmbeddedMap", OClass.INDEX_TYPE.UNIQUE, "fEmbeddedMap");
+
+ assertEquals(result.getName(), "ClassIndexTestPropertyEmbeddedMap");
+ assertEquals(oClass.getClassIndex("ClassIndexTestPropertyEmbeddedMap").getName(), result.getName());
+ assertEquals(database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestPropertyEmbeddedMap")
+ .getName(), result.getName());
+
+ final OIndexDefinition indexDefinition = result.getDefinition();
+
+ assertTrue(indexDefinition instanceof OPropertyMapIndexDefinition);
+ assertEquals(indexDefinition.getFields().get(0), "fEmbeddedMap");
+ assertEquals(indexDefinition.getTypes()[0], OType.STRING);
+ assertEquals(((OPropertyMapIndexDefinition) indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.KEY);
+ }
+
+ @Test
+ public void testCreateCompositeEmbeddedMapIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestCompositeEmbeddedMap", OClass.INDEX_TYPE.UNIQUE, "fFifteen",
+ "fEmbeddedMap");
+
+ assertEquals(result.getName(), "ClassIndexTestCompositeEmbeddedMap");
+ assertEquals(oClass.getClassIndex("ClassIndexTestCompositeEmbeddedMap").getName(), result.getName());
+ assertEquals(database.getMetadata().getIndexManager()
+ .getClassIndex("ClassIndexTestClass", "ClassIndexTestCompositeEmbeddedMap").getName(), result.getName());
- assertEquals( result.getName(), "ClassIndexTestCompositeTwo" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestCompositeTwo" ).getName(), result.getName() );
- assertEquals( database.getMetadata().getIndexManager().getClassIndex( "ClassIndexTestClass", "ClassIndexTestCompositeTwo" ).getName(),
- result.getName() );
- assertEquals( atomicInteger.get(), 2 );
+ final OIndexDefinition indexDefinition = result.getDefinition();
+
+ assertTrue(indexDefinition instanceof OCompositeIndexDefinition);
+ assertEquals(indexDefinition.getFields().toArray(), new String[] { "fFifteen", "fEmbeddedMap" });
+
+ assertEquals(indexDefinition.getTypes(), new OType[] { OType.INTEGER, OType.STRING });
+ assertEquals(indexDefinition.getParamCount(), 2);
}
@Test
- public void testCreateOnePropertyEmbeddedMapIndex()
- {
- final OIndex result = oClass.createIndex( "ClassIndexTestPropertyEmbeddedMap", OClass.INDEX_TYPE.UNIQUE, "fEmbeddedMap" );
+ public void testCreateCompositeEmbeddedMapByKeyIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestCompositeEmbeddedMapByKey", OClass.INDEX_TYPE.UNIQUE, "fEight",
+ "fEmbeddedMap");
- assertEquals( result.getName(), "ClassIndexTestPropertyEmbeddedMap" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestPropertyEmbeddedMap" ).getName(), result.getName() );
+ assertEquals(result.getName(), "ClassIndexTestCompositeEmbeddedMapByKey");
+ assertEquals(oClass.getClassIndex("ClassIndexTestCompositeEmbeddedMapByKey").getName(), result.getName());
assertEquals(
- database.getMetadata().getIndexManager().getClassIndex( "ClassIndexTestClass", "ClassIndexTestPropertyEmbeddedMap" ).getName(),
- result.getName() );
+ database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestCompositeEmbeddedMapByKey")
+ .getName(), result.getName());
final OIndexDefinition indexDefinition = result.getDefinition();
- assertTrue( indexDefinition instanceof OPropertyMapIndexDefinition );
- assertEquals( indexDefinition.getFields().get( 0 ), "fEmbeddedMap" );
- assertEquals( indexDefinition.getTypes()[0], OType.STRING );
- assertEquals(((OPropertyMapIndexDefinition)indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.KEY );
+ assertTrue(indexDefinition instanceof OCompositeIndexDefinition);
+ assertEquals(indexDefinition.getFields().toArray(), new String[] { "fEight", "fEmbeddedMap" });
+
+ assertEquals(indexDefinition.getTypes(), new OType[] { OType.INTEGER, OType.STRING });
+ assertEquals(indexDefinition.getParamCount(), 2);
}
@Test
- public void testCreateOnePropertyLinkedMapIndex()
- {
- final OIndex result = oClass.createIndex( "ClassIndexTestPropertyLinkedMap", OClass.INDEX_TYPE.UNIQUE, "fLinkMap" );
+ public void testCreateCompositeEmbeddedMapByValueIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestCompositeEmbeddedMapByValue", OClass.INDEX_TYPE.UNIQUE, "fTen",
+ "fEmbeddedMap by value");
- assertEquals( result.getName(), "ClassIndexTestPropertyLinkedMap" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestPropertyLinkedMap" ).getName(), result.getName() );
+ assertEquals(result.getName(), "ClassIndexTestCompositeEmbeddedMapByValue");
+ assertEquals(oClass.getClassIndex("ClassIndexTestCompositeEmbeddedMapByValue").getName(), result.getName());
assertEquals(
- database.getMetadata().getIndexManager().getClassIndex( "ClassIndexTestClass", "ClassIndexTestPropertyLinkedMap" ).getName(),
- result.getName() );
+ database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestCompositeEmbeddedMapByValue")
+ .getName(), result.getName());
final OIndexDefinition indexDefinition = result.getDefinition();
- assertTrue( indexDefinition instanceof OPropertyMapIndexDefinition );
- assertEquals( indexDefinition.getFields().get( 0 ), "fLinkMap" );
- assertEquals( indexDefinition.getTypes()[0], OType.STRING );
- assertEquals(((OPropertyMapIndexDefinition)indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.KEY );
+ assertTrue(indexDefinition instanceof OCompositeIndexDefinition);
+ assertEquals(indexDefinition.getFields().toArray(), new String[] { "fTen", "fEmbeddedMap" });
+
+ assertEquals(indexDefinition.getTypes(), new OType[] { OType.INTEGER, OType.INTEGER });
+ assertEquals(indexDefinition.getParamCount(), 2);
}
@Test
- public void testCreateOnePropertyLinkMapByKeyIndex()
- {
- final OIndex result = oClass.createIndex( "ClassIndexTestPropertyLinkedMap", OClass.INDEX_TYPE.UNIQUE, "fLinkMap by key" );
+ public void testCreateCompositeLinkMapByValueIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestCompositeLinkMapByValue", OClass.INDEX_TYPE.UNIQUE, "fEleven",
+ "fLinkMap by value");
- assertEquals( result.getName(), "ClassIndexTestPropertyLinkedMap" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestPropertyLinkedMap" ).getName(), result.getName() );
+ assertEquals(result.getName(), "ClassIndexTestCompositeLinkMapByValue");
+ assertEquals(oClass.getClassIndex("ClassIndexTestCompositeLinkMapByValue").getName(), result.getName());
assertEquals(
- database.getMetadata().getIndexManager().getClassIndex( "ClassIndexTestClass", "ClassIndexTestPropertyLinkedMap" ).getName(),
- result.getName() );
+ database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestCompositeLinkMapByValue")
+ .getName(), result.getName());
final OIndexDefinition indexDefinition = result.getDefinition();
- assertTrue( indexDefinition instanceof OPropertyMapIndexDefinition );
- assertEquals( indexDefinition.getFields().get( 0 ), "fLinkMap" );
- assertEquals( indexDefinition.getTypes()[0], OType.STRING );
- assertEquals(((OPropertyMapIndexDefinition)indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.KEY );
+ assertTrue(indexDefinition instanceof OCompositeIndexDefinition);
+ assertEquals(indexDefinition.getFields().toArray(), new String[] { "fEleven", "fLinkMap" });
+
+ assertEquals(indexDefinition.getTypes(), new OType[] { OType.INTEGER, OType.LINK });
+ assertEquals(indexDefinition.getParamCount(), 2);
+ }
+
+ @Test
+ public void testCreateCompositeEmbeddedSetIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestCompositeEmbeddedSet", OClass.INDEX_TYPE.UNIQUE, "fTwelve",
+ "fEmbeddedSet");
+
+ assertEquals(result.getName(), "ClassIndexTestCompositeEmbeddedSet");
+ assertEquals(oClass.getClassIndex("ClassIndexTestCompositeEmbeddedSet").getName(), result.getName());
+ assertEquals(database.getMetadata().getIndexManager()
+ .getClassIndex("ClassIndexTestClass", "ClassIndexTestCompositeEmbeddedSet").getName(), result.getName());
+
+ final OIndexDefinition indexDefinition = result.getDefinition();
+
+ assertTrue(indexDefinition instanceof OCompositeIndexDefinition);
+ assertEquals(indexDefinition.getFields().toArray(), new String[] { "fTwelve", "fEmbeddedSet" });
+
+ assertEquals(indexDefinition.getTypes(), new OType[] { OType.INTEGER, OType.INTEGER });
+ assertEquals(indexDefinition.getParamCount(), 2);
+ }
+
+ @Test(expectedExceptions = OIndexException.class)
+ public void testCreateCompositeLinkSetIndex() {
+ oClass.createIndex("ClassIndexTestCompositeLinkSet", OClass.INDEX_TYPE.UNIQUE, "fTwelve", "fLinkSet");
}
@Test
- public void testCreateOnePropertyLinkMapByValueIndex()
- {
- final OIndex result = oClass.createIndex( "ClassIndexTestPropertyLinkedMap", OClass.INDEX_TYPE.UNIQUE, "fLinkMap by value" );
+ public void testCreateCompositeEmbeddedListIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestCompositeEmbeddedList", OClass.INDEX_TYPE.UNIQUE, "fThirteen",
+ "fEmbeddedList");
- assertEquals( result.getName(), "ClassIndexTestPropertyLinkedMap" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestPropertyLinkedMap" ).getName(), result.getName() );
+ assertEquals(result.getName(), "ClassIndexTestCompositeEmbeddedList");
+ assertEquals(oClass.getClassIndex("ClassIndexTestCompositeEmbeddedList").getName(), result.getName());
assertEquals(
- database.getMetadata().getIndexManager().getClassIndex( "ClassIndexTestClass", "ClassIndexTestPropertyLinkedMap" ).getName(),
- result.getName() );
+ database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestCompositeEmbeddedList")
+ .getName(), result.getName());
final OIndexDefinition indexDefinition = result.getDefinition();
- assertTrue( indexDefinition instanceof OPropertyMapIndexDefinition );
- assertEquals( indexDefinition.getFields().get( 0 ), "fLinkMap" );
- assertEquals( indexDefinition.getTypes()[0], OType.LINK );
- assertEquals(((OPropertyMapIndexDefinition)indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.VALUE );
+ assertTrue(indexDefinition instanceof OCompositeIndexDefinition);
+ assertEquals(indexDefinition.getFields().toArray(), new String[] { "fThirteen", "fEmbeddedList" });
+
+ assertEquals(indexDefinition.getTypes(), new OType[] { OType.INTEGER, OType.INTEGER });
+ assertEquals(indexDefinition.getParamCount(), 2);
}
+ @Test
+ public void testCreateCompositeLinkListIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestCompositeLinkList", OClass.INDEX_TYPE.UNIQUE, "fFourteen", "fLinkList");
+
+ assertEquals(result.getName(), "ClassIndexTestCompositeLinkList");
+ assertEquals(oClass.getClassIndex("ClassIndexTestCompositeLinkList").getName(), result.getName());
+ assertEquals(database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestCompositeLinkList")
+ .getName(), result.getName());
+
+ final OIndexDefinition indexDefinition = result.getDefinition();
+
+ assertTrue(indexDefinition instanceof OCompositeIndexDefinition);
+ assertEquals(indexDefinition.getFields().toArray(), new String[] { "fFourteen", "fLinkList" });
+
+ assertEquals(indexDefinition.getTypes(), new OType[] { OType.INTEGER, OType.LINK });
+ assertEquals(indexDefinition.getParamCount(), 2);
+ }
@Test
- public void testCreateOnePropertyByKeyEmbeddedMapIndex()
- {
- final OIndex result = oClass.createIndex( "ClassIndexTestPropertyByKeyEmbeddedMap", OClass.INDEX_TYPE.UNIQUE, "fEmbeddedMap by key" );
+ public void testCreateOnePropertyLinkedMapIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestPropertyLinkedMap", OClass.INDEX_TYPE.UNIQUE, "fLinkMap");
+
+ assertEquals(result.getName(), "ClassIndexTestPropertyLinkedMap");
+ assertEquals(oClass.getClassIndex("ClassIndexTestPropertyLinkedMap").getName(), result.getName());
+ assertEquals(database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestPropertyLinkedMap")
+ .getName(), result.getName());
+
+ final OIndexDefinition indexDefinition = result.getDefinition();
- assertEquals( result.getName(), "ClassIndexTestPropertyByKeyEmbeddedMap" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestPropertyByKeyEmbeddedMap" ).getName(), result.getName() );
+ assertTrue(indexDefinition instanceof OPropertyMapIndexDefinition);
+ assertEquals(indexDefinition.getFields().get(0), "fLinkMap");
+ assertEquals(indexDefinition.getTypes()[0], OType.STRING);
+ assertEquals(((OPropertyMapIndexDefinition) indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.KEY);
+ }
+
+ @Test
+ public void testCreateOnePropertyLinkMapByKeyIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestPropertyLinkedMapByKey", OClass.INDEX_TYPE.UNIQUE, "fLinkMap by key");
+
+ assertEquals(result.getName(), "ClassIndexTestPropertyLinkedMapByKey");
+ assertEquals(oClass.getClassIndex("ClassIndexTestPropertyLinkedMapByKey").getName(), result.getName());
assertEquals(
- database.getMetadata().getIndexManager().getClassIndex( "ClassIndexTestClass", "ClassIndexTestPropertyByKeyEmbeddedMap" ).getName(),
- result.getName() );
+ database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestPropertyLinkedMapByKey")
+ .getName(), result.getName());
final OIndexDefinition indexDefinition = result.getDefinition();
- assertTrue( indexDefinition instanceof OPropertyMapIndexDefinition );
- assertEquals( indexDefinition.getFields().get( 0 ), "fEmbeddedMap" );
- assertEquals( indexDefinition.getTypes()[0], OType.STRING );
- assertEquals(((OPropertyMapIndexDefinition)indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.KEY );
+ assertTrue(indexDefinition instanceof OPropertyMapIndexDefinition);
+ assertEquals(indexDefinition.getFields().get(0), "fLinkMap");
+ assertEquals(indexDefinition.getTypes()[0], OType.STRING);
+ assertEquals(((OPropertyMapIndexDefinition) indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.KEY);
}
@Test
- public void testCreateOnePropertyByValueEmbeddedMapIndex()
- {
- final OIndex result = oClass.createIndex( "ClassIndexTestPropertyByValueEmbeddedMap", OClass.INDEX_TYPE.UNIQUE, "fEmbeddedMap by value" );
+ public void testCreateOnePropertyLinkMapByValueIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestPropertyLinkedMapByValue", OClass.INDEX_TYPE.UNIQUE,
+ "fLinkMap by value");
- assertEquals( result.getName(), "ClassIndexTestPropertyByValueEmbeddedMap" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestPropertyByValueEmbeddedMap" ).getName(), result.getName() );
+ assertEquals(result.getName(), "ClassIndexTestPropertyLinkedMapByValue");
+ assertEquals(oClass.getClassIndex("ClassIndexTestPropertyLinkedMapByValue").getName(), result.getName());
assertEquals(
- database.getMetadata().getIndexManager().getClassIndex( "ClassIndexTestClass", "ClassIndexTestPropertyByValueEmbeddedMap" ).getName(),
- result.getName() );
+ database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestPropertyLinkedMapByValue")
+ .getName(), result.getName());
final OIndexDefinition indexDefinition = result.getDefinition();
- assertTrue( indexDefinition instanceof OPropertyMapIndexDefinition );
- assertEquals( indexDefinition.getFields().get( 0 ), "fEmbeddedMap" );
- assertEquals( indexDefinition.getTypes()[0], OType.INTEGER );
- assertEquals(((OPropertyMapIndexDefinition)indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.VALUE );
+ assertTrue(indexDefinition instanceof OPropertyMapIndexDefinition);
+ assertEquals(indexDefinition.getFields().get(0), "fLinkMap");
+ assertEquals(indexDefinition.getTypes()[0], OType.LINK);
+ assertEquals(((OPropertyMapIndexDefinition) indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.VALUE);
}
@Test
- public void testCreateOnePropertyWrongSpecifierEmbeddedMapIndexOne()
- {
+ public void testCreateOnePropertyByKeyEmbeddedMapIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestPropertyByKeyEmbeddedMap", OClass.INDEX_TYPE.UNIQUE,
+ "fEmbeddedMap by key");
+
+ assertEquals(result.getName(), "ClassIndexTestPropertyByKeyEmbeddedMap");
+ assertEquals(oClass.getClassIndex("ClassIndexTestPropertyByKeyEmbeddedMap").getName(), result.getName());
+ assertEquals(
+ database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestPropertyByKeyEmbeddedMap")
+ .getName(), result.getName());
+
+ final OIndexDefinition indexDefinition = result.getDefinition();
+
+ assertTrue(indexDefinition instanceof OPropertyMapIndexDefinition);
+ assertEquals(indexDefinition.getFields().get(0), "fEmbeddedMap");
+ assertEquals(indexDefinition.getTypes()[0], OType.STRING);
+ assertEquals(((OPropertyMapIndexDefinition) indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.KEY);
+ }
+
+ @Test
+ public void testCreateOnePropertyByValueEmbeddedMapIndex() {
+ final OIndex result = oClass.createIndex("ClassIndexTestPropertyByValueEmbeddedMap", OClass.INDEX_TYPE.UNIQUE,
+ "fEmbeddedMap by value");
+
+ assertEquals(result.getName(), "ClassIndexTestPropertyByValueEmbeddedMap");
+ assertEquals(oClass.getClassIndex("ClassIndexTestPropertyByValueEmbeddedMap").getName(), result.getName());
+ assertEquals(
+ database.getMetadata().getIndexManager().getClassIndex("ClassIndexTestClass", "ClassIndexTestPropertyByValueEmbeddedMap")
+ .getName(), result.getName());
+
+ final OIndexDefinition indexDefinition = result.getDefinition();
+
+ assertTrue(indexDefinition instanceof OPropertyMapIndexDefinition);
+ assertEquals(indexDefinition.getFields().get(0), "fEmbeddedMap");
+ assertEquals(indexDefinition.getTypes()[0], OType.INTEGER);
+ assertEquals(((OPropertyMapIndexDefinition) indexDefinition).getIndexBy(), OPropertyMapIndexDefinition.INDEX_BY.VALUE);
+ }
+
+ @Test
+ public void testCreateOnePropertyWrongSpecifierEmbeddedMapIndexOne() {
boolean exceptionIsThrown = false;
try {
- oClass.createIndex( "ClassIndexTestPropertyWrongSpecifierEmbeddedMap", OClass.INDEX_TYPE.UNIQUE, "fEmbeddedMap by ttt" );
- } catch( IllegalArgumentException e ) {
+ oClass.createIndex("ClassIndexTestPropertyWrongSpecifierEmbeddedMap", OClass.INDEX_TYPE.UNIQUE, "fEmbeddedMap by ttt");
+ } catch (IllegalArgumentException e) {
exceptionIsThrown = true;
- assertEquals(e.getMessage(), "Illegal field name format, should be '<property> [by key|value]' but was 'fEmbeddedMap by ttt'" );
+ assertEquals(e.getMessage(), "Illegal field name format, should be '<property> [by key|value]' but was 'fEmbeddedMap by ttt'");
}
- assertTrue( exceptionIsThrown );
- assertNull( oClass.getClassIndex( "ClassIndexTestPropertyWrongSpecifierEmbeddedMap" ));
+ assertTrue(exceptionIsThrown);
+ assertNull(oClass.getClassIndex("ClassIndexTestPropertyWrongSpecifierEmbeddedMap"));
}
@Test
- public void testCreateOnePropertyWrongSpecifierEmbeddedMapIndexTwo()
- {
+ public void testCreateOnePropertyWrongSpecifierEmbeddedMapIndexTwo() {
boolean exceptionIsThrown = false;
try {
- oClass.createIndex( "ClassIndexTestPropertyWrongSpecifierEmbeddedMap", OClass.INDEX_TYPE.UNIQUE, "fEmbeddedMap b value" );
- } catch( IllegalArgumentException e ) {
+ oClass.createIndex("ClassIndexTestPropertyWrongSpecifierEmbeddedMap", OClass.INDEX_TYPE.UNIQUE, "fEmbeddedMap b value");
+ } catch (IllegalArgumentException e) {
exceptionIsThrown = true;
- assertEquals(e.getMessage(), "Illegal field name format, should be '<property> [by key|value]' but was 'fEmbeddedMap b value'" );
+ assertEquals(e.getMessage(),
+ "Illegal field name format, should be '<property> [by key|value]' but was 'fEmbeddedMap b value'");
}
- assertTrue( exceptionIsThrown );
- assertNull( oClass.getClassIndex( "ClassIndexTestPropertyWrongSpecifierEmbeddedMap" ));
+ assertTrue(exceptionIsThrown);
+ assertNull(oClass.getClassIndex("ClassIndexTestPropertyWrongSpecifierEmbeddedMap"));
}
@Test
- public void testCreateOnePropertyWrongSpecifierEmbeddedMapIndexThree()
- {
+ public void testCreateOnePropertyWrongSpecifierEmbeddedMapIndexThree() {
boolean exceptionIsThrown = false;
try {
- oClass.createIndex( "ClassIndexTestPropertyWrongSpecifierEmbeddedMap", OClass.INDEX_TYPE.UNIQUE, "fEmbeddedMap by value t" );
- } catch( IllegalArgumentException e ) {
+ oClass.createIndex("ClassIndexTestPropertyWrongSpecifierEmbeddedMap", OClass.INDEX_TYPE.UNIQUE, "fEmbeddedMap by value t");
+ } catch (IllegalArgumentException e) {
exceptionIsThrown = true;
- assertEquals(e.getMessage(), "Illegal field name format, should be '<property> [by key|value]' but was 'fEmbeddedMap by value t'" );
+ assertEquals(e.getMessage(),
+ "Illegal field name format, should be '<property> [by key|value]' but was 'fEmbeddedMap by value t'");
}
- assertTrue( exceptionIsThrown );
- assertNull( oClass.getClassIndex( "ClassIndexTestPropertyWrongSpecifierEmbeddedMap" ));
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedOneProperty()
- {
- final boolean result = oClass.areIndexed( Arrays.asList( "fOne" ) );
-
- assertTrue( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedDoesNotContainProperty()
- {
- final boolean result = oClass.areIndexed( Arrays.asList( "fSix" ) );
-
- assertFalse( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedTwoProperties()
- {
- final boolean result = oClass.areIndexed( Arrays.asList( "fTwo", "fOne" ) );
-
- assertTrue( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedThreeProperties()
- {
- final boolean result = oClass.areIndexed( Arrays.asList( "fTwo", "fOne", "fThree" ) );
-
- assertTrue( result );
- }
-
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedPropertiesNotFirst()
- {
- final boolean result = oClass.areIndexed( Arrays.asList( "fTwo", "fTree" ) );
-
- assertFalse( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedPropertiesMoreThanNeeded()
- {
- final boolean result = oClass.areIndexed( Arrays.asList( "fTwo", "fOne", "fThee", "fFour" ) );
-
- assertFalse( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "createParentPropertyIndex", "testCreateOnePropertyEmbeddedMapIndex",
- "testCreateOnePropertyByKeyEmbeddedMapIndex", "testCreateOnePropertyByValueEmbeddedMapIndex",
- "testCreateOnePropertyLinkedMapIndex", "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedParentProperty()
- {
- final boolean result = oClass.areIndexed( Arrays.asList( "fNine" ) );
-
- assertTrue( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex"
- , "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedParentChildProperty()
- {
- final boolean result = oClass.areIndexed( Arrays.asList( "fOne, fNine" ) );
-
- assertFalse( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedOnePropertyArrayParams()
- {
- final boolean result = oClass.areIndexed( "fOne" );
-
- assertTrue( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedDoesNotContainPropertyArrayParams()
- {
- final boolean result = oClass.areIndexed( "fSix" );
-
- assertFalse( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedTwoPropertiesArrayParams()
- {
- final boolean result = oClass.areIndexed( "fTwo", "fOne" );
-
- assertTrue( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedThreePropertiesArrayParams()
- {
- final boolean result = oClass.areIndexed( "fTwo", "fOne", "fThree" );
-
- assertTrue( result );
- }
-
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedPropertiesNotFirstArrayParams()
- {
- final boolean result = oClass.areIndexed( "fTwo", "fTree" );
-
- assertFalse( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedPropertiesMoreThanNeededArrayParams()
- {
- final boolean result = oClass.areIndexed( "fTwo", "fOne", "fThee", "fFour" );
-
- assertFalse( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "createParentPropertyIndex", "testCreateOnePropertyEmbeddedMapIndex",
- "testCreateOnePropertyByKeyEmbeddedMapIndex", "testCreateOnePropertyByValueEmbeddedMapIndex",
- "testCreateOnePropertyLinkedMapIndex", "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedParentPropertyArrayParams()
- {
- final boolean result = oClass.areIndexed( "fNine" );
-
- assertTrue( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testAreIndexedParentChildPropertyArrayParams()
- {
- final boolean result = oClass.areIndexed( "fOne, fNine" );
-
- assertFalse( result );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetClassInvolvedIndexesOnePropertyArrayParams()
- {
- final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes( "fOne" );
-
- assertEquals( result.size(), 3 );
-
- assertTrue( containsIndex( result, "ClassIndexTestPropertyOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeTwo" ) );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetClassInvolvedIndexesTwoPropertiesArrayParams()
- {
- final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes( "fTwo", "fOne" );
- assertEquals( result.size(), 2 );
-
- assertTrue( containsIndex( result, "ClassIndexTestCompositeOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeTwo" ) );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetClassInvolvedIndexesThreePropertiesArrayParams()
- {
- final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes( "fTwo", "fOne", "fThree" );
-
- assertEquals( result.size(), 1 );
- assertEquals( result.iterator().next().getName(), "ClassIndexTestCompositeTwo" );
- }
-
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetClassInvolvedIndexesNotInvolvedPropertiesArrayParams()
- {
- final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes( "fTwo", "fFour" );
-
- assertEquals( result.size(), 0 );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetClassInvolvedIndexesPropertiesMorThanNeededArrayParams()
- {
- final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes( "fTwo", "fOne", "fThee", "fFour" );
-
- assertEquals( result.size(), 0 );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetInvolvedIndexesPropertiesMorThanNeeded()
- {
- final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes( Arrays.asList( "fTwo", "fOne", "fThee", "fFour" ) );
-
- assertEquals( result.size(), 0 );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetClassInvolvedIndexesOneProperty()
- {
- final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes( Arrays.asList( "fOne" ) );
-
- assertEquals( result.size(), 3 );
-
- assertTrue( containsIndex( result, "ClassIndexTestPropertyOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeTwo" ) );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetClassInvolvedIndexesTwoProperties()
- {
- final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes( Arrays.asList( "fTwo", "fOne" ) );
- assertEquals( result.size(), 2 );
-
- assertTrue( containsIndex( result, "ClassIndexTestCompositeOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeTwo" ) );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetClassInvolvedIndexesThreeProperties()
- {
- final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes( Arrays.asList( "fTwo", "fOne", "fThree" ) );
-
- assertEquals( result.size(), 1 );
- assertEquals( result.iterator().next().getName(), "ClassIndexTestCompositeTwo" );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetClassInvolvedIndexesNotInvolvedProperties()
- {
- final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes( Arrays.asList( "fTwo", "fFour" ) );
-
- assertEquals( result.size(), 0 );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetClassInvolvedIndexesPropertiesMorThanNeeded()
- {
- final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes( Arrays.asList( "fTwo", "fOne", "fThee", "fFour" ) );
-
- assertEquals( result.size(), 0 );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetInvolvedIndexesOnePropertyArrayParams()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( "fOne" );
-
- assertEquals( result.size(), 3 );
-
- assertTrue( containsIndex( result, "ClassIndexTestPropertyOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeTwo" ) );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetInvolvedIndexesTwoPropertiesArrayParams()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( "fTwo", "fOne" );
- assertEquals( result.size(), 2 );
-
- assertTrue( containsIndex( result, "ClassIndexTestCompositeOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeTwo" ) );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetInvolvedIndexesThreePropertiesArrayParams()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( "fTwo", "fOne", "fThree" );
-
- assertEquals( result.size(), 1 );
- assertEquals( result.iterator().next().getName(), "ClassIndexTestCompositeTwo" );
- }
-
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetInvolvedIndexesNotInvolvedPropertiesArrayParams()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( "fTwo", "fFour" );
-
- assertEquals( result.size(), 0 );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetParentInvolvedIndexesArrayParams()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( "fNine" );
-
- assertEquals( result.size(), 1 );
- assertEquals( result.iterator().next().getName(), "ClassIndexTestParentPropertyNine" );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetParentChildInvolvedIndexesArrayParams()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( "fOne", "fNine" );
-
- assertEquals( result.size(), 0 );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetInvolvedIndexesOneProperty()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( Arrays.asList( "fOne" ) );
-
- assertEquals( result.size(), 3 );
-
- assertTrue( containsIndex( result, "ClassIndexTestPropertyOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeTwo" ) );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetInvolvedIndexesTwoProperties()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( Arrays.asList( "fTwo", "fOne" ) );
- assertEquals( result.size(), 2 );
-
- assertTrue( containsIndex( result, "ClassIndexTestCompositeOne" ) );
- assertTrue( containsIndex( result, "ClassIndexTestCompositeTwo" ) );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetInvolvedIndexesThreeProperties()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( Arrays.asList( "fTwo", "fOne", "fThree" ) );
-
- assertEquals( result.size(), 1 );
- assertEquals( result.iterator().next().getName(), "ClassIndexTestCompositeTwo" );
- }
-
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetInvolvedIndexesNotInvolvedProperties()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( Arrays.asList( "fTwo", "fFour" ) );
-
- assertEquals( result.size(), 0 );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetParentInvolvedIndexes()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( Arrays.asList( "fNine" ) );
-
- assertEquals( result.size(), 1 );
- assertEquals( result.iterator().next().getName(), "ClassIndexTestParentPropertyNine" );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetParentChildInvolvedIndexes()
- {
- final Set<OIndex<?>> result = oClass.getInvolvedIndexes( Arrays.asList( "fOne", "fNine" ) );
-
- assertEquals( result.size(), 0 );
- }
-
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
- "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
- "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetClassIndexes()
- {
- final Set<OIndex<?>> indexes = oClass.getClassIndexes();
- final Set<OIndexDefinition> expectedIndexDefinitions = new HashSet<OIndexDefinition>();
+ assertTrue(exceptionIsThrown);
+ assertNull(oClass.getClassIndex("ClassIndexTestPropertyWrongSpecifierEmbeddedMap"));
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedOneProperty() {
+ final boolean result = oClass.areIndexed(Arrays.asList("fOne"));
- final OCompositeIndexDefinition compositeIndexOne = new OCompositeIndexDefinition( "ClassIndexTestClass" );
+ assertTrue(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapIndex", "testCreateCompositeEmbeddedMapByKeyIndex",
+ "testCreateCompositeEmbeddedMapByValueIndex", "testCreateCompositeLinkMapByValueIndex",
+ "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedEightProperty() {
+ final boolean result = oClass.areIndexed(Arrays.asList("fEight"));
+ assertTrue(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByKeyIndex",
+ "testCreateCompositeEmbeddedMapByValueIndex", "testCreateCompositeLinkMapByValueIndex",
+ "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedEightPropertyEmbeddedMap() {
+ final boolean result = oClass.areIndexed(Arrays.asList("fEmbeddedMap", "fEight"));
+ assertTrue(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedDoesNotContainProperty() {
+ final boolean result = oClass.areIndexed(Arrays.asList("fSix"));
- compositeIndexOne.addIndex( new OPropertyIndexDefinition( "ClassIndexTestClass", "fOne", OType.INTEGER ) );
- compositeIndexOne.addIndex( new OPropertyIndexDefinition( "ClassIndexTestClass", "fTwo", OType.STRING ) );
- expectedIndexDefinitions.add( compositeIndexOne );
+ assertFalse(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedTwoProperties() {
+ final boolean result = oClass.areIndexed(Arrays.asList("fTwo", "fOne"));
- final OCompositeIndexDefinition compositeIndexTwo = new OCompositeIndexDefinition( "ClassIndexTestClass" );
+ assertTrue(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedThreeProperties() {
+ final boolean result = oClass.areIndexed(Arrays.asList("fTwo", "fOne", "fThree"));
- compositeIndexTwo.addIndex( new OPropertyIndexDefinition( "ClassIndexTestClass", "fOne", OType.INTEGER ) );
- compositeIndexTwo.addIndex( new OPropertyIndexDefinition( "ClassIndexTestClass", "fTwo", OType.STRING ) );
- compositeIndexTwo.addIndex( new OPropertyIndexDefinition( "ClassIndexTestClass", "fThree", OType.BOOLEAN ) );
- expectedIndexDefinitions.add( compositeIndexTwo );
+ assertTrue(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedPropertiesNotFirst() {
+ final boolean result = oClass.areIndexed(Arrays.asList("fTwo", "fTree"));
- final OPropertyIndexDefinition propertyIndex = new OPropertyIndexDefinition( "ClassIndexTestClass", "fOne", OType.INTEGER );
- expectedIndexDefinitions.add( propertyIndex );
+ assertFalse(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedPropertiesMoreThanNeeded() {
+ final boolean result = oClass.areIndexed(Arrays.asList("fTwo", "fOne", "fThee", "fFour"));
- final OPropertyMapIndexDefinition propertyMapIndexDefinition = new OPropertyMapIndexDefinition( "ClassIndexTestClass", "fEmbeddedMap", OType.STRING,
- OPropertyMapIndexDefinition.INDEX_BY.KEY );
- expectedIndexDefinitions.add( propertyMapIndexDefinition );
-
- final OPropertyMapIndexDefinition propertyMapByValueIndexDefinition = new OPropertyMapIndexDefinition( "ClassIndexTestClass", "fEmbeddedMap", OType.INTEGER,
- OPropertyMapIndexDefinition.INDEX_BY.VALUE );
- expectedIndexDefinitions.add( propertyMapByValueIndexDefinition );
-
- final OPropertyMapIndexDefinition propertyLinkMapByKeyIndexDefinition = new OPropertyMapIndexDefinition( "ClassIndexTestClass", "fLinkMap", OType.STRING,
- OPropertyMapIndexDefinition.INDEX_BY.KEY );
- expectedIndexDefinitions.add( propertyLinkMapByKeyIndexDefinition );
-
- final OPropertyMapIndexDefinition propertyLinkMapByValueIndexDefinition = new OPropertyMapIndexDefinition( "ClassIndexTestClass", "fLinkMap", OType.LINK,
- OPropertyMapIndexDefinition.INDEX_BY.VALUE );
- expectedIndexDefinitions.add( propertyLinkMapByValueIndexDefinition );
-
- assertEquals( indexes.size(), 7);
-
- for( final OIndex index : indexes ) {
- assertTrue( expectedIndexDefinitions.contains( index.getDefinition() ) );
- }
+ assertFalse(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "createParentPropertyIndex", "testCreateOnePropertyEmbeddedMapIndex",
+ "testCreateOnePropertyByKeyEmbeddedMapIndex", "testCreateOnePropertyByValueEmbeddedMapIndex",
+ "testCreateOnePropertyLinkedMapIndex", "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex",
+ "testCreateCompositeEmbeddedMapIndex", "testCreateCompositeEmbeddedMapByKeyIndex",
+ "testCreateCompositeEmbeddedMapByValueIndex", "testCreateCompositeLinkMapByValueIndex",
+ "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedParentProperty() {
+ final boolean result = oClass.areIndexed(Arrays.asList("fNine"));
+
+ assertTrue(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedParentChildProperty() {
+ final boolean result = oClass.areIndexed(Arrays.asList("fOne, fNine"));
+
+ assertFalse(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedOnePropertyArrayParams() {
+ final boolean result = oClass.areIndexed("fOne");
+
+ assertTrue(result);
+ }
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedDoesNotContainPropertyArrayParams() {
+ final boolean result = oClass.areIndexed("fSix");
+
+ assertFalse(result);
}
- @Test(dependsOnMethods = {
- "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
- "testCreateOnePropertyIndexTest", "createParentPropertyIndex", "testCreateOnePropertyEmbeddedMapIndex",
- "testCreateOnePropertyByKeyEmbeddedMapIndex", "testCreateOnePropertyByValueEmbeddedMapIndex",
- "testCreateOnePropertyLinkedMapIndex", "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex"
- })
- public void testGetIndexes()
- {
- final Set<OIndex<?>> indexes = oClass.getIndexes();
- final Set<OIndexDefinition> expectedIndexDefinitions = new HashSet<OIndexDefinition>();
-
- final OCompositeIndexDefinition compositeIndexOne = new OCompositeIndexDefinition( "ClassIndexTestClass" );
-
- compositeIndexOne.addIndex( new OPropertyIndexDefinition( "ClassIndexTestClass", "fOne", OType.INTEGER ) );
- compositeIndexOne.addIndex( new OPropertyIndexDefinition( "ClassIndexTestClass", "fTwo", OType.STRING ) );
- expectedIndexDefinitions.add( compositeIndexOne );
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedTwoPropertiesArrayParams() {
+ final boolean result = oClass.areIndexed("fTwo", "fOne");
+
+ assertTrue(result);
+ }
- final OCompositeIndexDefinition compositeIndexTwo = new OCompositeIndexDefinition( "ClassIndexTestClass" );
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedThreePropertiesArrayParams() {
+ final boolean result = oClass.areIndexed("fTwo", "fOne", "fThree");
+
+ assertTrue(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedPropertiesNotFirstArrayParams() {
+ final boolean result = oClass.areIndexed("fTwo", "fTree");
+
+ assertFalse(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedPropertiesMoreThanNeededArrayParams() {
+ final boolean result = oClass.areIndexed("fTwo", "fOne", "fThee", "fFour");
+
+ assertFalse(result);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "createParentPropertyIndex", "testCreateOnePropertyEmbeddedMapIndex",
+ "testCreateOnePropertyByKeyEmbeddedMapIndex", "testCreateOnePropertyByValueEmbeddedMapIndex",
+ "testCreateOnePropertyLinkedMapIndex", "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex",
+ "testCreateCompositeEmbeddedMapIndex", "testCreateCompositeEmbeddedMapByKeyIndex",
+ "testCreateCompositeEmbeddedMapByValueIndex", "testCreateCompositeLinkMapByValueIndex",
+ "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedParentPropertyArrayParams() {
+ final boolean result = oClass.areIndexed("fNine");
+
+ assertTrue(result);
+ }
- compositeIndexTwo.addIndex( new OPropertyIndexDefinition( "ClassIndexTestClass", "fOne", OType.INTEGER ) );
- compositeIndexTwo.addIndex( new OPropertyIndexDefinition( "ClassIndexTestClass", "fTwo", OType.STRING ) );
- compositeIndexTwo.addIndex( new OPropertyIndexDefinition( "ClassIndexTestClass", "fThree", OType.BOOLEAN ) );
- expectedIndexDefinitions.add( compositeIndexTwo );
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testAreIndexedParentChildPropertyArrayParams() {
+ final boolean result = oClass.areIndexed("fOne, fNine");
- final OPropertyIndexDefinition propertyIndex = new OPropertyIndexDefinition( "ClassIndexTestClass", "fOne", OType.INTEGER );
- expectedIndexDefinitions.add( propertyIndex );
+ assertFalse(result);
+ }
- final OPropertyIndexDefinition parentPropertyIndex = new OPropertyIndexDefinition( "ClassIndexTestSuperClass", "fNine", OType.INTEGER );
- expectedIndexDefinitions.add( parentPropertyIndex );
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetClassInvolvedIndexesOnePropertyArrayParams() {
+ final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes("fOne");
- final OPropertyMapIndexDefinition propertyMapIndexDefinition = new OPropertyMapIndexDefinition( "ClassIndexTestClass", "fEmbeddedMap", OType.STRING,
- OPropertyMapIndexDefinition.INDEX_BY.KEY );
- expectedIndexDefinitions.add( propertyMapIndexDefinition );
+ assertEquals(result.size(), 3);
- final OPropertyMapIndexDefinition propertyMapByValueIndexDefinition = new OPropertyMapIndexDefinition( "ClassIndexTestClass", "fEmbeddedMap", OType.INTEGER,
- OPropertyMapIndexDefinition.INDEX_BY.VALUE );
- expectedIndexDefinitions.add( propertyMapByValueIndexDefinition );
+ assertTrue(containsIndex(result, "ClassIndexTestPropertyOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeTwo"));
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetClassInvolvedIndexesTwoPropertiesArrayParams() {
+ final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes("fTwo", "fOne");
+ assertEquals(result.size(), 2);
+
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeTwo"));
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetClassInvolvedIndexesThreePropertiesArrayParams() {
+ final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes("fTwo", "fOne", "fThree");
+
+ assertEquals(result.size(), 1);
+ assertEquals(result.iterator().next().getName(), "ClassIndexTestCompositeTwo");
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetClassInvolvedIndexesNotInvolvedPropertiesArrayParams() {
+ final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes("fTwo", "fFour");
+
+ assertEquals(result.size(), 0);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetClassInvolvedIndexesPropertiesMorThanNeededArrayParams() {
+ final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes("fTwo", "fOne", "fThee", "fFour");
+
+ assertEquals(result.size(), 0);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetInvolvedIndexesPropertiesMorThanNeeded() {
+ final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes(Arrays.asList("fTwo", "fOne", "fThee", "fFour"));
+
+ assertEquals(result.size(), 0);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetClassInvolvedIndexesOneProperty() {
+ final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes(Arrays.asList("fOne"));
+
+ assertEquals(result.size(), 3);
+
+ assertTrue(containsIndex(result, "ClassIndexTestPropertyOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeTwo"));
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetClassInvolvedIndexesTwoProperties() {
+ final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes(Arrays.asList("fTwo", "fOne"));
+ assertEquals(result.size(), 2);
+
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeTwo"));
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetClassInvolvedIndexesThreeProperties() {
+ final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes(Arrays.asList("fTwo", "fOne", "fThree"));
+
+ assertEquals(result.size(), 1);
+ assertEquals(result.iterator().next().getName(), "ClassIndexTestCompositeTwo");
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetClassInvolvedIndexesNotInvolvedProperties() {
+ final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes(Arrays.asList("fTwo", "fFour"));
+
+ assertEquals(result.size(), 0);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetClassInvolvedIndexesPropertiesMorThanNeeded() {
+ final Set<OIndex<?>> result = oClass.getClassInvolvedIndexes(Arrays.asList("fTwo", "fOne", "fThee", "fFour"));
+
+ assertEquals(result.size(), 0);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetInvolvedIndexesOnePropertyArrayParams() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes("fOne");
+
+ assertEquals(result.size(), 3);
+
+ assertTrue(containsIndex(result, "ClassIndexTestPropertyOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeTwo"));
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetInvolvedIndexesTwoPropertiesArrayParams() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes("fTwo", "fOne");
+ assertEquals(result.size(), 2);
+
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeTwo"));
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetInvolvedIndexesThreePropertiesArrayParams() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes("fTwo", "fOne", "fThree");
+
+ assertEquals(result.size(), 1);
+ assertEquals(result.iterator().next().getName(), "ClassIndexTestCompositeTwo");
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetInvolvedIndexesNotInvolvedPropertiesArrayParams() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes("fTwo", "fFour");
+
+ assertEquals(result.size(), 0);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetParentInvolvedIndexesArrayParams() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes("fNine");
+
+ assertEquals(result.size(), 1);
+ assertEquals(result.iterator().next().getName(), "ClassIndexTestParentPropertyNine");
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetParentChildInvolvedIndexesArrayParams() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes("fOne", "fNine");
+
+ assertEquals(result.size(), 0);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetInvolvedIndexesOneProperty() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes(Arrays.asList("fOne"));
+
+ assertEquals(result.size(), 3);
+
+ assertTrue(containsIndex(result, "ClassIndexTestPropertyOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeTwo"));
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetInvolvedIndexesTwoProperties() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes(Arrays.asList("fTwo", "fOne"));
+ assertEquals(result.size(), 2);
+
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeOne"));
+ assertTrue(containsIndex(result, "ClassIndexTestCompositeTwo"));
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetInvolvedIndexesThreeProperties() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes(Arrays.asList("fTwo", "fOne", "fThree"));
+
+ assertEquals(result.size(), 1);
+ assertEquals(result.iterator().next().getName(), "ClassIndexTestCompositeTwo");
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetInvolvedIndexesNotInvolvedProperties() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes(Arrays.asList("fTwo", "fFour"));
+
+ assertEquals(result.size(), 0);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetParentInvolvedIndexes() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes(Arrays.asList("fNine"));
+
+ assertEquals(result.size(), 1);
+ assertEquals(result.iterator().next().getName(), "ClassIndexTestParentPropertyNine");
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetParentChildInvolvedIndexes() {
+ final Set<OIndex<?>> result = oClass.getInvolvedIndexes(Arrays.asList("fOne", "fNine"));
+
+ assertEquals(result.size(), 0);
+ }
+
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "testCreateOnePropertyEmbeddedMapIndex", "testCreateOnePropertyByKeyEmbeddedMapIndex",
+ "testCreateOnePropertyByValueEmbeddedMapIndex", "testCreateOnePropertyLinkedMapIndex",
+ "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex", "testCreateCompositeEmbeddedMapIndex",
+ "testCreateCompositeEmbeddedMapByKeyIndex", "testCreateCompositeEmbeddedMapByValueIndex",
+ "testCreateCompositeLinkMapByValueIndex", "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetClassIndexes() {
+ final Set<OIndex<?>> indexes = oClass.getClassIndexes();
+ final Set<OIndexDefinition> expectedIndexDefinitions = new HashSet<OIndexDefinition>();
- final OPropertyMapIndexDefinition propertyLinkMapByKeyIndexDefinition = new OPropertyMapIndexDefinition( "ClassIndexTestClass", "fLinkMap", OType.STRING,
- OPropertyMapIndexDefinition.INDEX_BY.KEY );
- expectedIndexDefinitions.add( propertyLinkMapByKeyIndexDefinition );
+ final OCompositeIndexDefinition compositeIndexOne = new OCompositeIndexDefinition("ClassIndexTestClass");
+
+ compositeIndexOne.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fOne", OType.INTEGER));
+ compositeIndexOne.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fTwo", OType.STRING));
+ expectedIndexDefinitions.add(compositeIndexOne);
+
+ final OCompositeIndexDefinition compositeIndexTwo = new OCompositeIndexDefinition("ClassIndexTestClass");
+
+ compositeIndexTwo.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fOne", OType.INTEGER));
+ compositeIndexTwo.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fTwo", OType.STRING));
+ compositeIndexTwo.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fThree", OType.BOOLEAN));
+ expectedIndexDefinitions.add(compositeIndexTwo);
+
+ final OCompositeIndexDefinition compositeIndexThree = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexThree.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fEight", OType.INTEGER));
+ compositeIndexThree.addIndex(new OPropertyMapIndexDefinition("ClassIndexTestClass", "fEmbeddedMap", OType.STRING,
+ OPropertyMapIndexDefinition.INDEX_BY.KEY));
+ expectedIndexDefinitions.add(compositeIndexThree);
+
+ final OCompositeIndexDefinition compositeIndexFour = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexFour.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fTen", OType.INTEGER));
+ compositeIndexFour.addIndex(new OPropertyMapIndexDefinition("ClassIndexTestClass", "fEmbeddedMap", OType.INTEGER,
+ OPropertyMapIndexDefinition.INDEX_BY.VALUE));
+ expectedIndexDefinitions.add(compositeIndexFour);
+
+ final OCompositeIndexDefinition compositeIndexFive = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexFive.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fEleven", OType.INTEGER));
+ compositeIndexFive.addIndex(new OPropertyMapIndexDefinition("ClassIndexTestClass", "fLinkMap", OType.LINK,
+ OPropertyMapIndexDefinition.INDEX_BY.VALUE));
+ expectedIndexDefinitions.add(compositeIndexFive);
+
+ final OCompositeIndexDefinition compositeIndexSix = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexSix.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fTwelve", OType.INTEGER));
+ compositeIndexSix.addIndex(new OPropertyListIndexDefinition("ClassIndexTestClass", "fEmbeddedSet", OType.INTEGER));
+ expectedIndexDefinitions.add(compositeIndexSix);
+
+ final OCompositeIndexDefinition compositeIndexSeven = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexSeven.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fThirteen", OType.INTEGER));
+ compositeIndexSeven.addIndex(new OPropertyListIndexDefinition("ClassIndexTestClass", "fEmbeddedList", OType.INTEGER));
+ expectedIndexDefinitions.add(compositeIndexSeven);
+
+ final OCompositeIndexDefinition compositeIndexEight = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexEight.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fFourteen", OType.INTEGER));
+ compositeIndexEight.addIndex(new OPropertyListIndexDefinition("ClassIndexTestClass", "fEmbeddedList", OType.LINK));
+ expectedIndexDefinitions.add(compositeIndexEight);
+
+ final OCompositeIndexDefinition compositeIndexNine = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexNine.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fFifteen", OType.INTEGER));
+ compositeIndexNine.addIndex(new OPropertyMapIndexDefinition("ClassIndexTestClass", "fEmbeddedMap", OType.STRING,
+ OPropertyMapIndexDefinition.INDEX_BY.KEY));
+ expectedIndexDefinitions.add(compositeIndexNine);
+
+ final OPropertyIndexDefinition propertyIndex = new OPropertyIndexDefinition("ClassIndexTestClass", "fOne", OType.INTEGER);
+ expectedIndexDefinitions.add(propertyIndex);
+
+ final OPropertyMapIndexDefinition propertyMapIndexDefinition = new OPropertyMapIndexDefinition("ClassIndexTestClass",
+ "fEmbeddedMap", OType.STRING, OPropertyMapIndexDefinition.INDEX_BY.KEY);
+ expectedIndexDefinitions.add(propertyMapIndexDefinition);
+
+ final OPropertyMapIndexDefinition propertyMapByValueIndexDefinition = new OPropertyMapIndexDefinition("ClassIndexTestClass",
+ "fEmbeddedMap", OType.INTEGER, OPropertyMapIndexDefinition.INDEX_BY.VALUE);
+ expectedIndexDefinitions.add(propertyMapByValueIndexDefinition);
+
+ final OPropertyMapIndexDefinition propertyLinkMapByKeyIndexDefinition = new OPropertyMapIndexDefinition("ClassIndexTestClass",
+ "fLinkMap", OType.STRING, OPropertyMapIndexDefinition.INDEX_BY.KEY);
+ expectedIndexDefinitions.add(propertyLinkMapByKeyIndexDefinition);
+
+ final OPropertyMapIndexDefinition propertyLinkMapByValueIndexDefinition = new OPropertyMapIndexDefinition(
+ "ClassIndexTestClass", "fLinkMap", OType.LINK, OPropertyMapIndexDefinition.INDEX_BY.VALUE);
+ expectedIndexDefinitions.add(propertyLinkMapByValueIndexDefinition);
+
+ assertEquals(indexes.size(), 15);
+
+ for (final OIndex index : indexes) {
+ assertTrue(expectedIndexDefinitions.contains(index.getDefinition()));
+ }
- final OPropertyMapIndexDefinition propertyLinkMapByValueIndexDefinition = new OPropertyMapIndexDefinition( "ClassIndexTestClass", "fLinkMap", OType.LINK,
- OPropertyMapIndexDefinition.INDEX_BY.VALUE );
- expectedIndexDefinitions.add( propertyLinkMapByValueIndexDefinition );
+ }
- assertEquals( indexes.size(), 8 );
+ @Test(dependsOnMethods = { "createCompositeIndexTestWithListener", "createCompositeIndexTestWithoutListener",
+ "testCreateOnePropertyIndexTest", "createParentPropertyIndex", "testCreateOnePropertyEmbeddedMapIndex",
+ "testCreateOnePropertyByKeyEmbeddedMapIndex", "testCreateOnePropertyByValueEmbeddedMapIndex",
+ "testCreateOnePropertyLinkedMapIndex", "testCreateOnePropertyLinkMapByKeyIndex", "testCreateOnePropertyLinkMapByValueIndex",
+ "testCreateCompositeEmbeddedMapIndex", "testCreateCompositeEmbeddedMapByKeyIndex",
+ "testCreateCompositeEmbeddedMapByValueIndex", "testCreateCompositeLinkMapByValueIndex",
+ "testCreateCompositeEmbeddedSetIndex", "testCreateCompositeEmbeddedListIndex" })
+ public void testGetIndexes() {
+ final Set<OIndex<?>> indexes = oClass.getIndexes();
+ final Set<OIndexDefinition> expectedIndexDefinitions = new HashSet<OIndexDefinition>();
- for( final OIndex index : indexes ) {
- assertTrue( expectedIndexDefinitions.contains( index.getDefinition() ) );
+ final OCompositeIndexDefinition compositeIndexOne = new OCompositeIndexDefinition("ClassIndexTestClass");
+
+ compositeIndexOne.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fOne", OType.INTEGER));
+ compositeIndexOne.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fTwo", OType.STRING));
+ expectedIndexDefinitions.add(compositeIndexOne);
+
+ final OCompositeIndexDefinition compositeIndexTwo = new OCompositeIndexDefinition("ClassIndexTestClass");
+
+ compositeIndexTwo.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fOne", OType.INTEGER));
+ compositeIndexTwo.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fTwo", OType.STRING));
+ compositeIndexTwo.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fThree", OType.BOOLEAN));
+ expectedIndexDefinitions.add(compositeIndexTwo);
+
+ final OCompositeIndexDefinition compositeIndexThree = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexThree.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fEight", OType.INTEGER));
+ compositeIndexThree.addIndex(new OPropertyMapIndexDefinition("ClassIndexTestClass", "fEmbeddedMap", OType.STRING,
+ OPropertyMapIndexDefinition.INDEX_BY.KEY));
+ expectedIndexDefinitions.add(compositeIndexThree);
+
+ final OCompositeIndexDefinition compositeIndexFour = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexFour.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fTen", OType.INTEGER));
+ compositeIndexFour.addIndex(new OPropertyMapIndexDefinition("ClassIndexTestClass", "fEmbeddedMap", OType.INTEGER,
+ OPropertyMapIndexDefinition.INDEX_BY.VALUE));
+ expectedIndexDefinitions.add(compositeIndexFour);
+
+ final OCompositeIndexDefinition compositeIndexFive = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexFive.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fEleven", OType.INTEGER));
+ compositeIndexFive.addIndex(new OPropertyMapIndexDefinition("ClassIndexTestClass", "fLinkMap", OType.LINK,
+ OPropertyMapIndexDefinition.INDEX_BY.VALUE));
+ expectedIndexDefinitions.add(compositeIndexFive);
+
+ final OCompositeIndexDefinition compositeIndexSix = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexSix.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fTwelve", OType.INTEGER));
+ compositeIndexSix.addIndex(new OPropertyListIndexDefinition("ClassIndexTestClass", "fEmbeddedSet", OType.INTEGER));
+ expectedIndexDefinitions.add(compositeIndexSix);
+
+ final OCompositeIndexDefinition compositeIndexSeven = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexSeven.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fThirteen", OType.INTEGER));
+ compositeIndexSeven.addIndex(new OPropertyListIndexDefinition("ClassIndexTestClass", "fEmbeddedList", OType.INTEGER));
+ expectedIndexDefinitions.add(compositeIndexSeven);
+
+ final OCompositeIndexDefinition compositeIndexEight = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexEight.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fFourteen", OType.INTEGER));
+ compositeIndexEight.addIndex(new OPropertyListIndexDefinition("ClassIndexTestClass", "fEmbeddedList", OType.LINK));
+ expectedIndexDefinitions.add(compositeIndexEight);
+
+ final OCompositeIndexDefinition compositeIndexNine = new OCompositeIndexDefinition("ClassIndexTestClass");
+ compositeIndexNine.addIndex(new OPropertyIndexDefinition("ClassIndexTestClass", "fFifteen", OType.INTEGER));
+ compositeIndexNine.addIndex(new OPropertyMapIndexDefinition("ClassIndexTestClass", "fEmbeddedMap", OType.STRING,
+ OPropertyMapIndexDefinition.INDEX_BY.KEY));
+ expectedIndexDefinitions.add(compositeIndexNine);
+
+ final OPropertyIndexDefinition propertyIndex = new OPropertyIndexDefinition("ClassIndexTestClass", "fOne", OType.INTEGER);
+ expectedIndexDefinitions.add(propertyIndex);
+
+ final OPropertyIndexDefinition parentPropertyIndex = new OPropertyIndexDefinition("ClassIndexTestSuperClass", "fNine",
+ OType.INTEGER);
+ expectedIndexDefinitions.add(parentPropertyIndex);
+
+ final OPropertyMapIndexDefinition propertyMapIndexDefinition = new OPropertyMapIndexDefinition("ClassIndexTestClass",
+ "fEmbeddedMap", OType.STRING, OPropertyMapIndexDefinition.INDEX_BY.KEY);
+ expectedIndexDefinitions.add(propertyMapIndexDefinition);
+
+ final OPropertyMapIndexDefinition propertyMapByValueIndexDefinition = new OPropertyMapIndexDefinition("ClassIndexTestClass",
+ "fEmbeddedMap", OType.INTEGER, OPropertyMapIndexDefinition.INDEX_BY.VALUE);
+ expectedIndexDefinitions.add(propertyMapByValueIndexDefinition);
+
+ final OPropertyMapIndexDefinition propertyLinkMapByKeyIndexDefinition = new OPropertyMapIndexDefinition("ClassIndexTestClass",
+ "fLinkMap", OType.STRING, OPropertyMapIndexDefinition.INDEX_BY.KEY);
+ expectedIndexDefinitions.add(propertyLinkMapByKeyIndexDefinition);
+
+ final OPropertyMapIndexDefinition propertyLinkMapByValueIndexDefinition = new OPropertyMapIndexDefinition(
+ "ClassIndexTestClass", "fLinkMap", OType.LINK, OPropertyMapIndexDefinition.INDEX_BY.VALUE);
+ expectedIndexDefinitions.add(propertyLinkMapByValueIndexDefinition);
+
+ assertEquals(indexes.size(), 16);
+
+ for (final OIndex index : indexes) {
+ assertTrue(expectedIndexDefinitions.contains(index.getDefinition()));
}
}
@Test
- public void testGetIndexesWithoutParent()
- {
-
- final OClass inClass = database.getMetadata().getSchema().createClass( "ClassIndexInTest" );
- inClass.createProperty( "fOne", OType.INTEGER );
+ public void testGetIndexesWithoutParent() {
+ final OClass inClass = database.getMetadata().getSchema().createClass("ClassIndexInTest");
+ inClass.createProperty("fOne", OType.INTEGER);
- final OIndex result = inClass.createIndex( "ClassIndexTestPropertyOne", OClass.INDEX_TYPE.UNIQUE, "fOne" );
+ final OIndex result = inClass.createIndex("ClassIndexTestPropertyOne", OClass.INDEX_TYPE.UNIQUE, "fOne");
- assertEquals( result.getName(), "ClassIndexTestPropertyOne" );
- assertEquals( inClass.getClassIndex( "ClassIndexTestPropertyOne" ).getName(), result.getName() );
+ assertEquals(result.getName(), "ClassIndexTestPropertyOne");
+ assertEquals(inClass.getClassIndex("ClassIndexTestPropertyOne").getName(), result.getName());
final Set<OIndex<?>> indexes = inClass.getIndexes();
- final OPropertyIndexDefinition propertyIndexDefinition = new OPropertyIndexDefinition( "ClassIndexInTest", "fOne", OType.INTEGER );
+ final OPropertyIndexDefinition propertyIndexDefinition = new OPropertyIndexDefinition("ClassIndexInTest", "fOne", OType.INTEGER);
- assertEquals( indexes.size(), 1 );
+ assertEquals(indexes.size(), 1);
- assertTrue( indexes.iterator().next().getDefinition().equals( propertyIndexDefinition ) );
+ assertTrue(indexes.iterator().next().getDefinition().equals(propertyIndexDefinition));
}
@Test(expectedExceptions = OIndexException.class)
- public void testCreateIndexEmptyFields()
- {
- oClass.createIndex( "ClassIndexTestCompositeEmpty", OClass.INDEX_TYPE.UNIQUE );
+ public void testCreateIndexEmptyFields() {
+ oClass.createIndex("ClassIndexTestCompositeEmpty", OClass.INDEX_TYPE.UNIQUE);
}
@Test(expectedExceptions = OIndexException.class)
- public void testCreateIndexAbsentFields()
- {
- oClass.createIndex( "ClassIndexTestCompositeFieldAbsent", OClass.INDEX_TYPE.UNIQUE, "fFive" );
+ public void testCreateIndexAbsentFields() {
+ oClass.createIndex("ClassIndexTestCompositeFieldAbsent", OClass.INDEX_TYPE.UNIQUE, "fFive");
}
@Test(expectedExceptions = OIndexException.class)
- public void testCreateProxyIndex()
- {
- oClass.createIndex( "ClassIndexTestProxyIndex", OClass.INDEX_TYPE.PROXY, "fOne" );
+ public void testCreateProxyIndex() {
+ oClass.createIndex("ClassIndexTestProxyIndex", OClass.INDEX_TYPE.PROXY, "fOne");
}
@Test(expectedExceptions = OIndexException.class)
- public void testCreateFullTextIndexTwoProperties()
- {
- oClass.createIndex( "ClassIndexTestFulltextIndex", OClass.INDEX_TYPE.FULLTEXT, "fSix", "fSeven" );
+ public void testCreateFullTextIndexTwoProperties() {
+ oClass.createIndex("ClassIndexTestFulltextIndex", OClass.INDEX_TYPE.FULLTEXT, "fSix", "fSeven");
}
@Test
- public void testCreateFullTextIndexOneProperty()
- {
- final OIndex<?> result = oClass.createIndex( "ClassIndexTestFulltextIndex", OClass.INDEX_TYPE.FULLTEXT, "fSix" );
+ public void testCreateFullTextIndexOneProperty() {
+ final OIndex<?> result = oClass.createIndex("ClassIndexTestFulltextIndex", OClass.INDEX_TYPE.FULLTEXT, "fSix");
- assertEquals( result.getName(), "ClassIndexTestFulltextIndex" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestFulltextIndex" ).getName(), result.getName() );
- assertEquals( result.getType(), OClass.INDEX_TYPE.FULLTEXT.toString() );
+ assertEquals(result.getName(), "ClassIndexTestFulltextIndex");
+ assertEquals(oClass.getClassIndex("ClassIndexTestFulltextIndex").getName(), result.getName());
+ assertEquals(result.getType(), OClass.INDEX_TYPE.FULLTEXT.toString());
}
@Test
- public void testCreateDictionaryIndex()
- {
- final OIndex<?> result = oClass.createIndex( "ClassIndexTestDictionaryIndex", OClass.INDEX_TYPE.DICTIONARY, "fOne" );
+ public void testCreateDictionaryIndex() {
+ final OIndex<?> result = oClass.createIndex("ClassIndexTestDictionaryIndex", OClass.INDEX_TYPE.DICTIONARY, "fOne");
- assertEquals( result.getName(), "ClassIndexTestDictionaryIndex" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestDictionaryIndex" ).getName(), result.getName() );
- assertEquals( result.getType(), OClass.INDEX_TYPE.DICTIONARY.toString() );
+ assertEquals(result.getName(), "ClassIndexTestDictionaryIndex");
+ assertEquals(oClass.getClassIndex("ClassIndexTestDictionaryIndex").getName(), result.getName());
+ assertEquals(result.getType(), OClass.INDEX_TYPE.DICTIONARY.toString());
}
@Test
- public void testCreateNotUniqueIndex()
- {
- final OIndex<?> result = oClass.createIndex( "ClassIndexTestNotUniqueIndex", OClass.INDEX_TYPE.NOTUNIQUE, "fOne" );
+ public void testCreateNotUniqueIndex() {
+ final OIndex<?> result = oClass.createIndex("ClassIndexTestNotUniqueIndex", OClass.INDEX_TYPE.NOTUNIQUE, "fOne");
- assertEquals( result.getName(), "ClassIndexTestNotUniqueIndex" );
- assertEquals( oClass.getClassIndex( "ClassIndexTestNotUniqueIndex" ).getName(), result.getName() );
- assertEquals( result.getType(), OClass.INDEX_TYPE.NOTUNIQUE.toString() );
+ assertEquals(result.getName(), "ClassIndexTestNotUniqueIndex");
+ assertEquals(oClass.getClassIndex("ClassIndexTestNotUniqueIndex").getName(), result.getName());
+ assertEquals(result.getType(), OClass.INDEX_TYPE.NOTUNIQUE.toString());
}
@Test
public void testCreateMapWithoutLinkedType() {
try {
- oClass.createIndex( "ClassIndexMapWithoutLinkedTypeIndex", OClass.INDEX_TYPE.NOTUNIQUE, "fEmbeddedMapWithoutLinkedType by value" );
+ oClass.createIndex("ClassIndexMapWithoutLinkedTypeIndex", OClass.INDEX_TYPE.NOTUNIQUE,
+ "fEmbeddedMapWithoutLinkedType by value");
fail();
} catch (OIndexException e) {
- assertEquals(e.getMessage(), "Linked type was not provided. " +
- "You should provide linked type for embedded collections that are going to be indexed.");
+ assertEquals(e.getMessage(), "Linked type was not provided. "
+ + "You should provide linked type for embedded collections that are going to be indexed.");
}
}
-
- public void createParentPropertyIndex()
- {
- final OIndex result = oSuperClass.createIndex( "ClassIndexTestParentPropertyNine", OClass.INDEX_TYPE.UNIQUE, "fNine" );
- assertEquals( result.getName(), "ClassIndexTestParentPropertyNine" );
- assertEquals( oSuperClass.getClassIndex( "ClassIndexTestParentPropertyNine" ).getName(), result.getName() );
+ public void createParentPropertyIndex() {
+ final OIndex result = oSuperClass.createIndex("ClassIndexTestParentPropertyNine", OClass.INDEX_TYPE.UNIQUE, "fNine");
+
+ assertEquals(result.getName(), "ClassIndexTestParentPropertyNine");
+ assertEquals(oSuperClass.getClassIndex("ClassIndexTestParentPropertyNine").getName(), result.getName());
}
- private boolean containsIndex( final Collection<? extends OIndex> classIndexes, final String indexName )
- {
- for( final OIndex index : classIndexes ) {
- if ( index.getName().equals( indexName ) ) {
+ private boolean containsIndex(final Collection<? extends OIndex> classIndexes, final String indexName) {
+ for (final OIndex index : classIndexes) {
+ if (index.getName().equals(indexName)) {
return true;
}
}
@@ -1078,12 +1276,11 @@ private boolean containsIndex( final Collection<? extends OIndex> classIndexes,
}
@Test
- public void testDropProperty() throws Exception
- {
- oClass.createProperty( "fFive", OType.INTEGER );
+ public void testDropProperty() throws Exception {
+ oClass.createProperty("fFive", OType.INTEGER);
- oClass.dropProperty( "fFive" );
+ oClass.dropProperty("fFive");
- assertNull( oClass.getProperty( "fFive" ) );
+ assertNull(oClass.getProperty("fFive"));
}
}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectIndexReuseTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectIndexReuseTest.java
index 6bb66684a70..c514c25cf63 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectIndexReuseTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectIndexReuseTest.java
@@ -2,8 +2,10 @@
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
@@ -18,2014 +20,2416 @@
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
-@Test(groups = {"index"})
+@Test(groups = { "index" })
public class SQLSelectIndexReuseTest extends AbstractIndexReuseTest {
- @Parameters(value = "url")
- public SQLSelectIndexReuseTest(final String iURL) {
- super(iURL);
- }
-
- @BeforeClass
- public void beforeClass() throws Exception {
- if (database.isClosed()) {
- database.open("admin", "admin");
- }
-
- final OSchema schema = database.getMetadata().getSchema();
- final OClass oClass = schema.createClass("sqlSelectIndexReuseTestClass");
-
- oClass.createProperty("prop1", OType.INTEGER);
- oClass.createProperty("prop2", OType.INTEGER);
- oClass.createProperty("prop3", OType.INTEGER);
- oClass.createProperty("prop4", OType.INTEGER);
- oClass.createProperty("prop5", OType.INTEGER);
- oClass.createProperty("prop6", OType.INTEGER);
- oClass.createProperty("prop7", OType.STRING);
- oClass.createProperty("fEmbeddedMap", OType.EMBEDDEDMAP, OType.INTEGER);
- oClass.createProperty("fLinkMap", OType.LINKMAP);
- oClass.createProperty("fEmbeddedList", OType.EMBEDDEDLIST, OType.INTEGER);
- oClass.createProperty("fLinkList", OType.LINKLIST);
-
- oClass.createIndex("indexone", OClass.INDEX_TYPE.UNIQUE, "prop1", "prop2");
- oClass.createIndex("indextwo", OClass.INDEX_TYPE.UNIQUE, "prop3");
- oClass.createIndex("indexthree", OClass.INDEX_TYPE.NOTUNIQUE, "prop1", "prop2", "prop4");
- oClass.createIndex("indexfour", OClass.INDEX_TYPE.NOTUNIQUE, "prop4", "prop1", "prop3");
- oClass.createIndex("indexfive", OClass.INDEX_TYPE.NOTUNIQUE, "prop6", "prop1", "prop3");
- oClass.createIndex("indexsix", OClass.INDEX_TYPE.FULLTEXT, "prop7");
- oClass.createIndex("sqlSelectIndexReuseTestEmbeddedMapByKey", OClass.INDEX_TYPE.NOTUNIQUE, "fEmbeddedMap");
- oClass.createIndex("sqlSelectIndexReuseTestEmbeddedMapByValue", OClass.INDEX_TYPE.NOTUNIQUE, "fEmbeddedMap by value");
- oClass.createIndex("sqlSelectIndexReuseTestEmbeddedList", OClass.INDEX_TYPE.NOTUNIQUE, "fEmbeddedList");
-
- schema.save();
-
- final String fullTextIndexStrings[] = {"Alice : What is the use of a book, without pictures or conversations?",
- "Rabbit : Oh my ears and whiskers, how late it's getting!",
- "Alice : If it had grown up, it would have made a dreadfully ugly child; but it makes rather a handsome pig, I think",
- "The Cat : We're all mad here.", "The Hatter : Why is a raven like a writing desk?",
- "The Hatter : Twinkle, twinkle, little bat! How I wonder what you're at.", "The Queen : Off with her head!",
- "The Duchess : Tut, tut, child! Everything's got a moral, if only you can find it.",
- "The Duchess : Take care of the sense, and the sounds will take care of themselves.",
- "The King : Begin at the beginning and go on till you come to the end: then stop."};
-
- for (int i = 0; i < 10; i++) {
- final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
-
- embeddedMap.put("key" + (i * 10 + 1), i * 10 + 1);
- embeddedMap.put("key" + (i * 10 + 2), i * 10 + 2);
- embeddedMap.put("key" + (i * 10 + 3), i * 10 + 3);
- embeddedMap.put("key" + (i * 10 + 4), i * 10 + 1);
-
- final List<Integer> embeddedList = new ArrayList<Integer>(3);
- embeddedList.add(i * 3);
- embeddedList.add(i * 3 + 1);
- embeddedList.add(i * 3 + 2);
-
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument("sqlSelectIndexReuseTestClass");
- document.field("prop1", i);
- document.field("prop2", j);
- document.field("prop3", i * 10 + j);
-
- document.field("prop4", i);
- document.field("prop5", i);
-
- document.field("prop6", j);
-
- document.field("prop7", fullTextIndexStrings[i]);
-
- document.field("fEmbeddedMap", embeddedMap);
-
- document.field("fEmbeddedList", embeddedList);
-
- document.save();
- }
- }
- database.close();
- }
-
- @AfterClass
- public void afterClass() throws Exception {
- if (database.isClosed()) {
- database.open("admin", "admin");
- }
-
- database.command(new OCommandSQL("drop class sqlSelectIndexReuseTestClass")).execute();
- database.getMetadata().getSchema().reload();
- database.getLevel2Cache().clear();
-
- database.close();
- }
-
- @Test
- public void testCompositeSearchEquals() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 = 2")).execute();
-
- Assert.assertEquals(result.size(), 1);
-
- final ODocument document = result.get(0);
- Assert.assertEquals(document.<Integer>field("prop1").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop2").intValue(), 2);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchHasChainOperatorsEquals() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1.asInteger() = 1 and prop2 = 2"))
- .execute();
-
- Assert.assertEquals(result.size(), 1);
-
- final ODocument document = result.get(0);
- Assert.assertEquals(document.<Integer>field("prop1").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop2").intValue(), 2);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- @Test
- public void testCompositeSearchEqualsOneField() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1")).execute();
-
- Assert.assertEquals(result.size(), 10);
-
- for (int i = 0; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testNoCompositeSearchEquals() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 = 1")).execute();
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- Assert.assertEquals(result.size(), 10);
-
- for (int i = 0; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", 1);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- @Test
- public void testCompositeSearchEqualsWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ? and prop2 = ?")).execute(1, 2);
-
- Assert.assertEquals(result.size(), 1);
-
- final ODocument document = result.get(0);
- Assert.assertEquals(document.<Integer>field("prop1").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop2").intValue(), 2);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchEqualsOneFieldWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ?")).execute(1);
-
- Assert.assertEquals(result.size(), 10);
-
- for (int i = 0; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testNoCompositeSearchEqualsWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 = ?")).execute(1);
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- Assert.assertEquals(result.size(), 10);
-
- for (int i = 0; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", 1);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- @Test
- public void testCompositeSearchGT() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 > 2")).execute();
-
- Assert.assertEquals(result.size(), 7);
-
- for (int i = 3; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchGTOneField() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 > 7")).execute();
-
- Assert.assertEquals(result.size(), 20);
-
- for (int i = 8; i < 10; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", j);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchGTOneFieldNoSearch() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 > 7")).execute();
-
- Assert.assertEquals(result.size(), 20);
-
- for (int i = 8; i < 10; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", j);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- @Test
- public void testCompositeSearchGTWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ? and prop2 > ?")).execute(1, 2);
-
- Assert.assertEquals(result.size(), 7);
-
- for (int i = 3; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchGTOneFieldWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 > ?")).execute(7);
-
- Assert.assertEquals(result.size(), 20);
-
- for (int i = 8; i < 10; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", j);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchGTOneFieldNoSearchWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 > ?")).execute(7);
-
- Assert.assertEquals(result.size(), 20);
-
- for (int i = 8; i < 10; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", j);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- @Test
- public void testCompositeSearchGTQ() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 >= 2")).execute();
-
- Assert.assertEquals(result.size(), 8);
-
- for (int i = 2; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchGTQOneField() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 >= 7")).execute();
-
- Assert.assertEquals(result.size(), 30);
-
- for (int i = 7; i < 10; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", j);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchGTQOneFieldNoSearch() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 >= 7")).execute();
-
- Assert.assertEquals(result.size(), 30);
-
- for (int i = 7; i < 10; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", j);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- @Test
- public void testCompositeSearchGTQWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ? and prop2 >= ?")).execute(1, 2);
-
- Assert.assertEquals(result.size(), 8);
-
- for (int i = 2; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchGTQOneFieldWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 >= ?")).execute(7);
-
- Assert.assertEquals(result.size(), 30);
-
- for (int i = 7; i < 10; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", j);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchGTQOneFieldNoSearchWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 >= ?")).execute(7);
-
- Assert.assertEquals(result.size(), 30);
-
- for (int i = 7; i < 10; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", j);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- @Test
- public void testCompositeSearchLTQ() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 <= 2")).execute();
-
- Assert.assertEquals(result.size(), 3);
-
- for (int i = 0; i <= 2; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
-
- }
-
- @Test
- public void testCompositeSearchLTQOneField() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 <= 7")).execute();
-
- Assert.assertEquals(result.size(), 80);
-
- for (int i = 0; i <= 7; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", j);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchLTQOneFieldNoSearch() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 <= 7")).execute();
-
- Assert.assertEquals(result.size(), 80);
-
- for (int i = 0; i <= 7; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", j);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- @Test
- public void testCompositeSearchLTQWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ? and prop2 <= ?")).execute(1, 2);
-
- Assert.assertEquals(result.size(), 3);
-
- for (int i = 0; i <= 2; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchLTQOneFieldWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 <= ?")).execute(7);
-
- Assert.assertEquals(result.size(), 80);
-
- for (int i = 0; i <= 7; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", j);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchLTQOneFieldNoSearchWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 <= ?")).execute(7);
-
- Assert.assertEquals(result.size(), 80);
-
- for (int i = 0; i <= 7; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", j);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- @Test
- public void testCompositeSearchLT() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 < 2")).execute();
-
- Assert.assertEquals(result.size(), 2);
-
- for (int i = 0; i < 2; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchLTOneField() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 < 7")).execute();
-
- Assert.assertEquals(result.size(), 70);
-
- for (int i = 0; i < 7; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", j);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchLTOneFieldNoSearch() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 < 7")).execute();
-
- Assert.assertEquals(result.size(), 70);
-
- for (int i = 0; i < 7; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", j);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- @Test
- public void testCompositeSearchLTWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ? and prop2 < ?")).execute(1, 2);
-
- Assert.assertEquals(result.size(), 2);
-
- for (int i = 0; i < 2; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchLTOneFieldWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 < ?")).execute(7);
-
- Assert.assertEquals(result.size(), 70);
-
- for (int i = 0; i < 7; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", j);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchLTOneFieldNoSearchWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 < ?")).execute(7);
-
- Assert.assertEquals(result.size(), 70);
-
- for (int i = 0; i < 7; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", j);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- @Test
- public void testCompositeSearchBetween() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 between 1 and 3"))
- .execute();
-
- Assert.assertEquals(result.size(), 3);
-
- for (int i = 1; i <= 3; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchBetweenOneField() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 between 1 and 3")).execute();
-
- Assert.assertEquals(result.size(), 30);
-
- for (int i = 1; i <= 3; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", j);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchBetweenOneFieldNoSearch() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 between 1 and 3")).execute();
-
- Assert.assertEquals(result.size(), 30);
-
- for (int i = 1; i <= 3; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", j);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- @Test
- public void testCompositeSearchBetweenWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 between ? and ?"))
- .execute(1, 3);
-
- Assert.assertEquals(result.size(), 3);
-
- for (int i = 1; i <= 3; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchBetweenOneFieldWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 between ? and ?")).execute(1, 3);
-
- Assert.assertEquals(result.size(), 30);
-
- for (int i = 1; i <= 3; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", i);
- document.field("prop2", j);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testCompositeSearchBetweenOneFieldNoSearchWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 between ? and ?")).execute(1, 3);
-
- Assert.assertEquals(result.size(), 30);
-
- for (int i = 1; i <= 3; i++) {
- for (int j = 0; j < 10; j++) {
- final ODocument document = new ODocument();
- document.field("prop1", j);
- document.field("prop2", i);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- @Test
- public void testSingleSearchEquals() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 = 1")).execute();
-
- Assert.assertEquals(result.size(), 1);
-
- final ODocument document = result.get(0);
- Assert.assertEquals(document.<Integer>field("prop3").intValue(), 1);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
-
- @Test
- public void testSingleSearchEqualsWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 = ?")).execute(1);
-
- Assert.assertEquals(result.size(), 1);
-
- final ODocument document = result.get(0);
- Assert.assertEquals(document.<Integer>field("prop3").intValue(), 1);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
-
- @Test
- public void testSingleSearchGT() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 > 90")).execute();
+ @Parameters(value = "url")
+ public SQLSelectIndexReuseTest(final String iURL) {
+ super(iURL);
+ }
- Assert.assertEquals(result.size(), 9);
+ @BeforeClass
+ public void beforeClass() throws Exception {
+ if (database.isClosed()) {
+ database.open("admin", "admin");
+ }
- for (int i = 91; i < 100; i++) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
+ final OSchema schema = database.getMetadata().getSchema();
+ final OClass oClass = schema.createClass("sqlSelectIndexReuseTestClass");
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
-
- @Test
- public void testSingleSearchGTWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ oClass.createProperty("prop1", OType.INTEGER);
+ oClass.createProperty("prop2", OType.INTEGER);
+ oClass.createProperty("prop3", OType.INTEGER);
+ oClass.createProperty("prop4", OType.INTEGER);
+ oClass.createProperty("prop5", OType.INTEGER);
+ oClass.createProperty("prop6", OType.INTEGER);
+ oClass.createProperty("prop7", OType.STRING);
+ oClass.createProperty("prop8", OType.INTEGER);
+ oClass.createProperty("prop9", OType.INTEGER);
+
+ oClass.createProperty("fEmbeddedMap", OType.EMBEDDEDMAP, OType.INTEGER);
+ oClass.createProperty("fEmbeddedMapTwo", OType.EMBEDDEDMAP, OType.INTEGER);
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
+ oClass.createProperty("fLinkMap", OType.LINKMAP);
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 > ?")).execute(90);
+ oClass.createProperty("fEmbeddedList", OType.EMBEDDEDLIST, OType.INTEGER);
+ oClass.createProperty("fEmbeddedListTwo", OType.EMBEDDEDLIST, OType.INTEGER);
- Assert.assertEquals(result.size(), 9);
+ oClass.createProperty("fLinkList", OType.LINKLIST);
- for (int i = 91; i < 100; i++) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
+ oClass.createProperty("fEmbeddedSet", OType.EMBEDDEDSET, OType.INTEGER);
+ oClass.createProperty("fEmbeddedSetTwo", OType.EMBEDDEDSET, OType.INTEGER);
+
+ oClass.createIndex("indexone", OClass.INDEX_TYPE.UNIQUE, "prop1", "prop2");
+ oClass.createIndex("indextwo", OClass.INDEX_TYPE.UNIQUE, "prop3");
+ oClass.createIndex("indexthree", OClass.INDEX_TYPE.NOTUNIQUE, "prop1", "prop2", "prop4");
+ oClass.createIndex("indexfour", OClass.INDEX_TYPE.NOTUNIQUE, "prop4", "prop1", "prop3");
+ oClass.createIndex("indexfive", OClass.INDEX_TYPE.NOTUNIQUE, "prop6", "prop1", "prop3");
+ oClass.createIndex("indexsix", OClass.INDEX_TYPE.FULLTEXT, "prop7");
+
+ oClass.createIndex("sqlSelectIndexReuseTestEmbeddedMapByKey", OClass.INDEX_TYPE.NOTUNIQUE, "fEmbeddedMap");
+ oClass.createIndex("sqlSelectIndexReuseTestEmbeddedMapByValue", OClass.INDEX_TYPE.NOTUNIQUE, "fEmbeddedMap by value");
+ oClass.createIndex("sqlSelectIndexReuseTestEmbeddedList", OClass.INDEX_TYPE.NOTUNIQUE, "fEmbeddedList");
+
+ oClass.createIndex("sqlSelectIndexReuseTestEmbeddedMapByKeyProp8", OClass.INDEX_TYPE.NOTUNIQUE, "fEmbeddedMapTwo", "prop8");
+ oClass.createIndex("sqlSelectIndexReuseTestEmbeddedMapByValueProp8", OClass.INDEX_TYPE.NOTUNIQUE, "fEmbeddedMapTwo by value",
+ "prop8");
+
+ oClass.createIndex("sqlSelectIndexReuseTestEmbeddedSetProp8", OClass.INDEX_TYPE.NOTUNIQUE, "fEmbeddedSetTwo", "prop8");
+ oClass.createIndex("sqlSelectIndexReuseTestProp9EmbeddedSetProp8", OClass.INDEX_TYPE.NOTUNIQUE, "prop9", "fEmbeddedSetTwo",
+ "prop8");
+
+ oClass.createIndex("sqlSelectIndexReuseTestEmbeddedListTwoProp8", OClass.INDEX_TYPE.NOTUNIQUE, "fEmbeddedListTwo", "prop8");
+
+ schema.save();
+
+ final String fullTextIndexStrings[] = { "Alice : What is the use of a book, without pictures or conversations?",
+ "Rabbit : Oh my ears and whiskers, how late it's getting!",
+ "Alice : If it had grown up, it would have made a dreadfully ugly child; but it makes rather a handsome pig, I think",
+ "The Cat : We're all mad here.", "The Hatter : Why is a raven like a writing desk?",
+ "The Hatter : Twinkle, twinkle, little bat! How I wonder what you're at.", "The Queen : Off with her head!",
+ "The Duchess : Tut, tut, child! Everything's got a moral, if only you can find it.",
+ "The Duchess : Take care of the sense, and the sounds will take care of themselves.",
+ "The King : Begin at the beginning and go on till you come to the end: then stop." };
+
+ for (int i = 0; i < 10; i++) {
+ final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
+
+ embeddedMap.put("key" + (i * 10 + 1), i * 10 + 1);
+ embeddedMap.put("key" + (i * 10 + 2), i * 10 + 2);
+ embeddedMap.put("key" + (i * 10 + 3), i * 10 + 3);
+ embeddedMap.put("key" + (i * 10 + 4), i * 10 + 1);
+
+ final List<Integer> embeddedList = new ArrayList<Integer>(3);
+ embeddedList.add(i * 3);
+ embeddedList.add(i * 3 + 1);
+ embeddedList.add(i * 3 + 2);
+
+ final Set<Integer> embeddedSet = new HashSet<Integer>();
+ embeddedSet.add(i * 10);
+ embeddedSet.add(i * 10 + 1);
+ embeddedSet.add(i * 10 + 2);
+
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument("sqlSelectIndexReuseTestClass");
+ document.field("prop1", i);
+ document.field("prop2", j);
+ document.field("prop3", i * 10 + j);
+
+ document.field("prop4", i);
+ document.field("prop5", i);
+
+ document.field("prop6", j);
+
+ document.field("prop7", fullTextIndexStrings[i]);
+
+ document.field("prop8", j);
+
+ document.field("prop9", j % 2);
+
+ document.field("fEmbeddedMap", embeddedMap);
+ document.field("fEmbeddedMapTwo", embeddedMap);
+
+ document.field("fEmbeddedList", embeddedList);
+ document.field("fEmbeddedListTwo", embeddedList);
+
+ document.field("fEmbeddedSet", embeddedSet);
+ document.field("fEmbeddedSetTwo", embeddedSet);
+
+ document.save();
+ }
+ }
+ database.close();
+ }
+
+ @AfterClass
+ public void afterClass() throws Exception {
+ if (database.isClosed()) {
+ database.open("admin", "admin");
+ }
+
+ database.command(new OCommandSQL("drop class sqlSelectIndexReuseTestClass")).execute();
+ database.getMetadata().getSchema().reload();
+ database.getLevel2Cache().clear();
+
+ database.close();
+ }
+
+ @Test
+ public void testCompositeSearchEquals() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 = 2")).execute();
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = result.get(0);
+ Assert.assertEquals(document.<Integer> field("prop1").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop2").intValue(), 2);
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchHasChainOperatorsEquals() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1.asInteger() = 1 and prop2 = 2"))
+ .execute();
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = result.get(0);
+ Assert.assertEquals(document.<Integer> field("prop1").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop2").intValue(), 2);
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ @Test
+ public void testCompositeSearchEqualsOneField() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ long oldCompositeIndexUsage21 = profiler.getCounter("Query.compositeIndexUsage.2.1");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1)
+ oldCompositeIndexUsage2 = 0;
+
+ if (oldCompositeIndexUsage21 == -1)
+ oldCompositeIndexUsage2 = 0;
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1")).execute();
+
+ Assert.assertEquals(result.size(), 10);
+
+ for (int i = 0; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2.1"), oldCompositeIndexUsage21 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchEqualsOneFieldMapIndexByKey() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ long oldCompositeIndexUsage21 = profiler.getCounter("Query.compositeIndexUsage.2.1");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+
+ if (oldCompositeIndexUsage2 == -1)
+ oldCompositeIndexUsage2 = 0;
+
+ if (oldCompositeIndexUsage21 == -1)
+ oldCompositeIndexUsage21 = 0;
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where fEmbeddedMapTwo containsKey 'key11'"))
+ .execute();
+
+ Assert.assertEquals(result.size(), 10);
+
+ final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
+
+ embeddedMap.put("key11", 11);
+ embeddedMap.put("key12", 12);
+ embeddedMap.put("key13", 13);
+ embeddedMap.put("key14", 11);
+
+ for (int i = 0; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop8", 1);
+ document.field("fEmbeddedMapTwo", embeddedMap);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2.1"), oldCompositeIndexUsage21 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchEqualsMapIndexByKey() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ long oldCompositeIndexUsage22 = profiler.getCounter("Query.compositeIndexUsage.2.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ if (oldCompositeIndexUsage22 == -1)
+ oldCompositeIndexUsage22 = 0;
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass "
+ + "where prop8 = 1 and fEmbeddedMapTwo containsKey 'key11'")).execute();
+
+ final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
+
+ embeddedMap.put("key11", 11);
+ embeddedMap.put("key12", 12);
+ embeddedMap.put("key13", 13);
+ embeddedMap.put("key14", 11);
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = new ODocument();
+ document.field("prop8", 1);
+ document.field("fEmbeddedMap", embeddedMap);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2.2"), oldCompositeIndexUsage22 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchEqualsOneFieldMapIndexByValue() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ long oldCompositeIndexUsage21 = profiler.getCounter("Query.compositeIndexUsage.2.1");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+ if (oldCompositeIndexUsage21 == -1) {
+ oldCompositeIndexUsage21 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass " + "where fEmbeddedMapTwo containsValue 22"))
+ .execute();
+
+ final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
+
+ embeddedMap.put("key21", 21);
+ embeddedMap.put("key22", 22);
+ embeddedMap.put("key23", 23);
+ embeddedMap.put("key24", 21);
+
+ Assert.assertEquals(result.size(), 10);
+
+ for (int i = 0; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop8", i);
+ document.field("fEmbeddedMapTwo", embeddedMap);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2.1"), oldCompositeIndexUsage21 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchEqualsMapIndexByValue() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ long oldCompositeIndexUsage22 = profiler.getCounter("Query.compositeIndexUsage.2.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+ if (oldCompositeIndexUsage22 == -1)
+ oldCompositeIndexUsage22 = 0;
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass "
+ + "where prop8 = 1 and fEmbeddedMapTwo containsValue 22")).execute();
+
+ final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
+
+ embeddedMap.put("key21", 21);
+ embeddedMap.put("key22", 22);
+ embeddedMap.put("key23", 23);
+ embeddedMap.put("key24", 21);
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = new ODocument();
+ document.field("prop8", 1);
+ document.field("fEmbeddedMap", embeddedMap);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2.2"), oldCompositeIndexUsage22 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchEqualsEmbeddedSetIndex() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ long oldCompositeIndexUsage22 = profiler.getCounter("Query.compositeIndexUsage.2.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ if (oldCompositeIndexUsage22 == -1)
+ oldCompositeIndexUsage22 = 0;
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass "
+ + "where prop8 = 1 and fEmbeddedSetTwo contains 12")).execute();
+
+ final Set<Integer> embeddedSet = new HashSet<Integer>();
+ embeddedSet.add(10);
+ embeddedSet.add(11);
+ embeddedSet.add(12);
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = new ODocument();
+ document.field("prop8", 1);
+ document.field("fEmbeddedSet", embeddedSet);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2.2"), oldCompositeIndexUsage22 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchEqualsEmbeddedSetInMiddleIndex() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ long oldCompositeIndexUsage3 = profiler.getCounter("Query.compositeIndexUsage.3");
+ long oldCompositeIndexUsage33 = profiler.getCounter("Query.compositeIndexUsage.3.3");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ if (oldCompositeIndexUsage3 == -1)
+ oldCompositeIndexUsage3 = 0;
+
+ if (oldCompositeIndexUsage33 == -1)
+ oldCompositeIndexUsage33 = 0;
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass "
+ + "where prop9 = 0 and fEmbeddedSetTwo contains 92 and prop8 > 2")).execute();
+
+ final Set<Integer> embeddedSet = new HashSet<Integer>(3);
+ embeddedSet.add(90);
+ embeddedSet.add(91);
+ embeddedSet.add(92);
+
+ Assert.assertEquals(result.size(), 3);
+
+ for (int i = 0; i < 3; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop8", i * 2 + 4);
+ document.field("prop9", 0);
+ document.field("fEmbeddedSet", embeddedSet);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.3"), oldCompositeIndexUsage3 + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.3.3"), oldCompositeIndexUsage33 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchEqualsOneFieldEmbeddedListIndex() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ long oldCompositeIndexUsage21 = profiler.getCounter("Query.compositeIndexUsage.2.1");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+
+ if (oldCompositeIndexUsage2 == -1)
+ oldCompositeIndexUsage2 = 0;
+
+ if (oldCompositeIndexUsage21 == -1)
+ oldCompositeIndexUsage21 = 0;
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where fEmbeddedListTwo contains 4")).execute();
+
+ Assert.assertEquals(result.size(), 10);
+
+ final List<Integer> embeddedList = new ArrayList<Integer>(3);
+ embeddedList.add(3);
+ embeddedList.add(4);
+ embeddedList.add(5);
+
+ for (int i = 0; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop8", i);
+ document.field("fEmbeddedListTwo", embeddedList);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2.1"), oldCompositeIndexUsage21 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchEqualsEmbeddedListIndex() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ long oldCompositeIndexUsage22 = profiler.getCounter("Query.compositeIndexUsage.2.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+ if (oldCompositeIndexUsage22 == -1)
+ oldCompositeIndexUsage22 = 0;
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where"
+ + " prop8 = 1 and fEmbeddedListTwo contains 4")).execute();
+
+ Assert.assertEquals(result.size(), 1);
+
+ final List<Integer> embeddedList = new ArrayList<Integer>(3);
+ embeddedList.add(3);
+ embeddedList.add(4);
+ embeddedList.add(5);
+
+ final ODocument document = new ODocument();
+ document.field("prop8", 1);
+ document.field("fEmbeddedListTwo", embeddedList);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2.2"), oldCompositeIndexUsage22 + 1);
+ }
+
+ @Test
+ public void testNoCompositeSearchEquals() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 = 1")).execute();
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ Assert.assertEquals(result.size(), 10);
+
+ for (int i = 0; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", 1);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ @Test
+ public void testCompositeSearchEqualsWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ? and prop2 = ?")).execute(1, 2);
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = result.get(0);
+ Assert.assertEquals(document.<Integer> field("prop1").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop2").intValue(), 2);
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchEqualsOneFieldWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ?")).execute(1);
+
+ Assert.assertEquals(result.size(), 10);
+
+ for (int i = 0; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testNoCompositeSearchEqualsWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 = ?")).execute(1);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ Assert.assertEquals(result.size(), 10);
+
+ for (int i = 0; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", 1);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ @Test
+ public void testCompositeSearchGT() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 > 2")).execute();
+
+ Assert.assertEquals(result.size(), 7);
+
+ for (int i = 3; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchGTOneField() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 > 7")).execute();
+
+ Assert.assertEquals(result.size(), 20);
+
+ for (int i = 8; i < 10; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", j);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchGTOneFieldNoSearch() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 > 7")).execute();
+
+ Assert.assertEquals(result.size(), 20);
+
+ for (int i = 8; i < 10; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", j);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ @Test
+ public void testCompositeSearchGTWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ? and prop2 > ?")).execute(1, 2);
+
+ Assert.assertEquals(result.size(), 7);
+
+ for (int i = 3; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchGTOneFieldWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 > ?")).execute(7);
+
+ Assert.assertEquals(result.size(), 20);
+
+ for (int i = 8; i < 10; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", j);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchGTOneFieldNoSearchWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 > ?")).execute(7);
+
+ Assert.assertEquals(result.size(), 20);
+
+ for (int i = 8; i < 10; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", j);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ @Test
+ public void testCompositeSearchGTQ() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 >= 2")).execute();
+
+ Assert.assertEquals(result.size(), 8);
+
+ for (int i = 2; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchGTQOneField() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 >= 7")).execute();
+
+ Assert.assertEquals(result.size(), 30);
+
+ for (int i = 7; i < 10; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", j);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchGTQOneFieldNoSearch() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 >= 7")).execute();
+
+ Assert.assertEquals(result.size(), 30);
+
+ for (int i = 7; i < 10; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", j);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ @Test
+ public void testCompositeSearchGTQWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ? and prop2 >= ?")).execute(1, 2);
+
+ Assert.assertEquals(result.size(), 8);
+
+ for (int i = 2; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchGTQOneFieldWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 >= ?")).execute(7);
+
+ Assert.assertEquals(result.size(), 30);
+
+ for (int i = 7; i < 10; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", j);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchGTQOneFieldNoSearchWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 >= ?")).execute(7);
+
+ Assert.assertEquals(result.size(), 30);
+
+ for (int i = 7; i < 10; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", j);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ @Test
+ public void testCompositeSearchLTQ() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 <= 2")).execute();
+
+ Assert.assertEquals(result.size(), 3);
+
+ for (int i = 0; i <= 2; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+
+ }
+
+ @Test
+ public void testCompositeSearchLTQOneField() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 <= 7")).execute();
+
+ Assert.assertEquals(result.size(), 80);
+
+ for (int i = 0; i <= 7; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", j);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchLTQOneFieldNoSearch() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 <= 7")).execute();
+
+ Assert.assertEquals(result.size(), 80);
+
+ for (int i = 0; i <= 7; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", j);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ @Test
+ public void testCompositeSearchLTQWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ? and prop2 <= ?")).execute(1, 2);
+
+ Assert.assertEquals(result.size(), 3);
+
+ for (int i = 0; i <= 2; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchLTQOneFieldWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 <= ?")).execute(7);
+
+ Assert.assertEquals(result.size(), 80);
+
+ for (int i = 0; i <= 7; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", j);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchLTQOneFieldNoSearchWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 <= ?")).execute(7);
+
+ Assert.assertEquals(result.size(), 80);
+
+ for (int i = 0; i <= 7; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", j);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ @Test
+ public void testCompositeSearchLT() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 < 2")).execute();
+
+ Assert.assertEquals(result.size(), 2);
+
+ for (int i = 0; i < 2; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchLTOneField() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 < 7")).execute();
+
+ Assert.assertEquals(result.size(), 70);
+
+ for (int i = 0; i < 7; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", j);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchLTOneFieldNoSearch() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 < 7")).execute();
+
+ Assert.assertEquals(result.size(), 70);
+
+ for (int i = 0; i < 7; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", j);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ @Test
+ public void testCompositeSearchLTWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = ? and prop2 < ?")).execute(1, 2);
+
+ Assert.assertEquals(result.size(), 2);
+
+ for (int i = 0; i < 2; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchLTOneFieldWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 < ?")).execute(7);
+
+ Assert.assertEquals(result.size(), 70);
+
+ for (int i = 0; i < 7; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", j);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchLTOneFieldNoSearchWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 < ?")).execute(7);
+
+ Assert.assertEquals(result.size(), 70);
+
+ for (int i = 0; i < 7; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", j);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ @Test
+ public void testCompositeSearchBetween() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 between 1 and 3"))
+ .execute();
+
+ Assert.assertEquals(result.size(), 3);
+
+ for (int i = 1; i <= 3; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchBetweenOneField() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 between 1 and 3")).execute();
+
+ Assert.assertEquals(result.size(), 30);
+
+ for (int i = 1; i <= 3; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", j);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchBetweenOneFieldNoSearch() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 between 1 and 3")).execute();
+
+ Assert.assertEquals(result.size(), 30);
+
+ for (int i = 1; i <= 3; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", j);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ @Test
+ public void testCompositeSearchBetweenWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 between ? and ?"))
+ .execute(1, 3);
+
+ Assert.assertEquals(result.size(), 3);
+
+ for (int i = 1; i <= 3; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchBetweenOneFieldWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 between ? and ?")).execute(1, 3);
+
+ Assert.assertEquals(result.size(), 30);
+
+ for (int i = 1; i <= 3; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", i);
+ document.field("prop2", j);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testCompositeSearchBetweenOneFieldNoSearchWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop2 between ? and ?")).execute(1, 3);
+
+ Assert.assertEquals(result.size(), 30);
+
+ for (int i = 1; i <= 3; i++) {
+ for (int j = 0; j < 10; j++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", j);
+ document.field("prop2", i);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ @Test
+ public void testSingleSearchEquals() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 = 1")).execute();
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = result.get(0);
+ Assert.assertEquals(document.<Integer> field("prop3").intValue(), 1);
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
+
+ @Test
+ public void testSingleSearchEqualsWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 = ?")).execute(1);
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = result.get(0);
+ Assert.assertEquals(document.<Integer> field("prop3").intValue(), 1);
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
+
+ @Test
+ public void testSingleSearchGT() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 > 90")).execute();
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
-
- @Test
- public void testSingleSearchGTQ() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ Assert.assertEquals(result.size(), 9);
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
+ for (int i = 91; i < 100; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 >= 90")).execute();
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
+
+ @Test
+ public void testSingleSearchGTWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- Assert.assertEquals(result.size(), 10);
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
- for (int i = 90; i < 100; i++) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 > ?")).execute(90);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
+ Assert.assertEquals(result.size(), 9);
- @Test
- public void testSingleSearchGTQWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ for (int i = 91; i < 100; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
+
+ @Test
+ public void testSingleSearchGTQ() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 >= ?")).execute(90);
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
- Assert.assertEquals(result.size(), 10);
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 >= 90")).execute();
- for (int i = 90; i < 100; i++) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
+ Assert.assertEquals(result.size(), 10);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
-
- @Test
- public void testSingleSearchLTQ() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ for (int i = 90; i < 100; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 <= 10")).execute();
+ @Test
+ public void testSingleSearchGTQWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- Assert.assertEquals(result.size(), 11);
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
- for (int i = 0; i <= 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 >= ?")).execute(90);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
+ Assert.assertEquals(result.size(), 10);
- @Test
- public void testSingleSearchLTQWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ for (int i = 90; i < 100; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
+
+ @Test
+ public void testSingleSearchLTQ() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 <= ?")).execute(10);
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
- Assert.assertEquals(result.size(), 11);
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 <= 10")).execute();
- for (int i = 0; i <= 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
+ Assert.assertEquals(result.size(), 11);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
+ for (int i = 0; i <= 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
- @Test
- public void testSingleSearchLT() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
+ @Test
+ public void testSingleSearchLTQWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 < 10")).execute();
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
- Assert.assertEquals(result.size(), 10);
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 <= ?")).execute(10);
- for (int i = 0; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
+ Assert.assertEquals(result.size(), 11);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
+ for (int i = 0; i <= 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
- @Test
- public void testSingleSearchLTWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
+ @Test
+ public void testSingleSearchLT() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 < ?")).execute(10);
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
- Assert.assertEquals(result.size(), 10);
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 < 10")).execute();
- for (int i = 0; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
+ Assert.assertEquals(result.size(), 10);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
+ for (int i = 0; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
- @Test
- public void testSingleSearchBetween() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 between 1 and 10")).execute();
-
- Assert.assertEquals(result.size(), 10);
-
- for (int i = 1; i <= 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
-
- @Test
- public void testSingleSearchBetweenWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 between ? and ?")).execute(1, 10);
-
- Assert.assertEquals(result.size(), 10);
-
- for (int i = 1; i <= 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
-
- @Test
- public void testSingleSearchIN() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 in [0, 5, 10]")).execute();
-
- Assert.assertEquals(result.size(), 3);
-
- for (int i = 0; i <= 10; i += 5) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
-
- @Test
- public void testSingleSearchINWithArgs() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 in [?, ?, ?]")).execute(0, 5, 10);
-
- Assert.assertEquals(result.size(), 3);
-
- for (int i = 0; i <= 10; i += 5) {
- final ODocument document = new ODocument();
- document.field("prop3", i);
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
-
- @Test
- public void testMostSpecificOnesProcessedFirst() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 = 1 and prop3 = 11"))
- .execute();
-
- Assert.assertEquals(result.size(), 1);
-
- final ODocument document = result.get(0);
- Assert.assertEquals(document.<Integer>field("prop1").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop2").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop3").intValue(), 11);
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testTripleSearch() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage3 = profiler.getCounter("Query.compositeIndexUsage.3");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage3 == -1) {
- oldCompositeIndexUsage3 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 = 1 and prop4 >= 1"))
- .execute();
-
- Assert.assertEquals(result.size(), 1);
-
- final ODocument document = result.get(0);
- Assert.assertEquals(document.<Integer>field("prop1").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop2").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop4").intValue(), 1);
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.3"), oldCompositeIndexUsage3 + 1);
- }
-
- @Test
- public void testTripleSearchLastFieldNotInIndexFirstCase() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 = 1 and prop5 >= 1"))
- .execute();
-
- Assert.assertEquals(result.size(), 1);
-
- final ODocument document = result.get(0);
- Assert.assertEquals(document.<Integer>field("prop1").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop2").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop5").intValue(), 1);
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testTripleSearchLastFieldNotInIndexSecondCase() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop4 >= 1")).execute();
-
- Assert.assertEquals(result.size(), 10);
-
- for (int i = 0; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
- document.field("prop4", 1);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testTripleSearchLastFieldInIndex() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage3 = profiler.getCounter("Query.compositeIndexUsage.3");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage3 == -1) {
- oldCompositeIndexUsage3 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop4 = 1")).execute();
-
- Assert.assertEquals(result.size(), 10);
-
- for (int i = 0; i < 10; i++) {
- final ODocument document = new ODocument();
- document.field("prop1", 1);
- document.field("prop2", i);
- document.field("prop4", 1);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.3"), oldCompositeIndexUsage3 + 1);
- }
-
- @Test
- public void testTripleSearchLastFieldsCanNotBeMerged() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage3 = profiler.getCounter("Query.compositeIndexUsage.3");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage3 == -1) {
- oldCompositeIndexUsage3 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop6 <= 1 and prop4 < 1")).execute();
-
- Assert.assertEquals(result.size(), 2);
-
- for (int i = 0; i < 2; i++) {
- final ODocument document = new ODocument();
- document.field("prop6", i);
- document.field("prop4", 0);
-
- Assert.assertEquals(containsDocument(result, document), 1);
- }
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.3"), oldCompositeIndexUsage3 + 1);
- }
-
- @Test
- public void testFullTextIndex() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop7 containstext 'Alice' ")).execute();
-
- Assert.assertEquals(result.size(), 20);
-
- final ODocument docOne = new ODocument();
- docOne.field("prop7", "Alice : What is the use of a book, without pictures or conversations?");
- Assert.assertEquals(containsDocument(result, docOne), 10);
-
- final ODocument docTwo = new ODocument();
- docTwo.field("prop7",
- "Alice : If it had grown up, it would have made a dreadfully ugly child; but it makes rather a handsome pig, I think");
- Assert.assertEquals(containsDocument(result, docTwo), 10);
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- }
-
- @Test
- public void testLastFieldNotCompatibleOperator() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 + 1 = 3")).execute();
+ @Test
+ public void testSingleSearchLTWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- Assert.assertEquals(result.size(), 1);
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
- final ODocument document = result.get(0);
- Assert.assertEquals(document.<Integer>field("prop1").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop2").intValue(), 2);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 < ?")).execute(10);
- @Test
- public void testEmbeddedMapByKeyIndexReuse() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ Assert.assertEquals(result.size(), 10);
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
+ for (int i = 0; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where fEmbeddedMap containskey 'key12'"))
- .execute();
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
- Assert.assertEquals(result.size(), 10);
+ @Test
+ public void testSingleSearchBetween() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 between 1 and 10")).execute();
+
+ Assert.assertEquals(result.size(), 10);
+
+ for (int i = 1; i <= 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
+
+ @Test
+ public void testSingleSearchBetweenWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 between ? and ?")).execute(1, 10);
+
+ Assert.assertEquals(result.size(), 10);
+
+ for (int i = 1; i <= 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
+
+ @Test
+ public void testSingleSearchIN() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 in [0, 5, 10]")).execute();
+
+ Assert.assertEquals(result.size(), 3);
+
+ for (int i = 0; i <= 10; i += 5) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
+
+ @Test
+ public void testSingleSearchINWithArgs() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop3 in [?, ?, ?]")).execute(0, 5, 10);
+
+ Assert.assertEquals(result.size(), 3);
+
+ for (int i = 0; i <= 10; i += 5) {
+ final ODocument document = new ODocument();
+ document.field("prop3", i);
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
+
+ @Test
+ public void testMostSpecificOnesProcessedFirst() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 = 1 and prop3 = 11"))
+ .execute();
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = result.get(0);
+ Assert.assertEquals(document.<Integer> field("prop1").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop2").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop3").intValue(), 11);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testTripleSearch() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage3 = profiler.getCounter("Query.compositeIndexUsage.3");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage3 == -1) {
+ oldCompositeIndexUsage3 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 = 1 and prop4 >= 1"))
+ .execute();
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = result.get(0);
+ Assert.assertEquals(document.<Integer> field("prop1").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop2").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop4").intValue(), 1);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.3"), oldCompositeIndexUsage3 + 1);
+ }
+
+ @Test
+ public void testTripleSearchLastFieldNotInIndexFirstCase() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 = 1 and prop5 >= 1"))
+ .execute();
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = result.get(0);
+ Assert.assertEquals(document.<Integer> field("prop1").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop2").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop5").intValue(), 1);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testTripleSearchLastFieldNotInIndexSecondCase() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop4 >= 1")).execute();
+
+ Assert.assertEquals(result.size(), 10);
+
+ for (int i = 0; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+ document.field("prop4", 1);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testTripleSearchLastFieldInIndex() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage3 = profiler.getCounter("Query.compositeIndexUsage.3");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage3 == -1) {
+ oldCompositeIndexUsage3 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop4 = 1")).execute();
+
+ Assert.assertEquals(result.size(), 10);
+
+ for (int i = 0; i < 10; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop1", 1);
+ document.field("prop2", i);
+ document.field("prop4", 1);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.3"), oldCompositeIndexUsage3 + 1);
+ }
+
+ @Test
+ public void testTripleSearchLastFieldsCanNotBeMerged() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage3 = profiler.getCounter("Query.compositeIndexUsage.3");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage3 == -1) {
+ oldCompositeIndexUsage3 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop6 <= 1 and prop4 < 1")).execute();
+
+ Assert.assertEquals(result.size(), 2);
+
+ for (int i = 0; i < 2; i++) {
+ final ODocument document = new ODocument();
+ document.field("prop6", i);
+ document.field("prop4", 0);
+
+ Assert.assertEquals(containsDocument(result, document), 1);
+ }
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.3"), oldCompositeIndexUsage3 + 1);
+ }
+
+ @Test
+ public void testFullTextIndex() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop7 containstext 'Alice' ")).execute();
+
+ Assert.assertEquals(result.size(), 20);
+
+ final ODocument docOne = new ODocument();
+ docOne.field("prop7", "Alice : What is the use of a book, without pictures or conversations?");
+ Assert.assertEquals(containsDocument(result, docOne), 10);
+
+ final ODocument docTwo = new ODocument();
+ docTwo.field("prop7",
+ "Alice : If it had grown up, it would have made a dreadfully ugly child; but it makes rather a handsome pig, I think");
+ Assert.assertEquals(containsDocument(result, docTwo), 10);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ }
+
+ @Test
+ public void testLastFieldNotCompatibleOperator() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
- final ODocument document = new ODocument();
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 + 1 = 3")).execute();
- final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
+ Assert.assertEquals(result.size(), 1);
- embeddedMap.put("key11", 11);
- embeddedMap.put("key12", 12);
- embeddedMap.put("key13", 13);
- embeddedMap.put("key14", 11);
+ final ODocument document = result.get(0);
+ Assert.assertEquals(document.<Integer> field("prop1").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop2").intValue(), 2);
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
- document.field("fEmbeddedMap", embeddedMap);
+ @Test
+ public void testEmbeddedMapByKeyIndexReuse() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
- Assert.assertEquals(containsDocument(result, document), 10);
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2);
- }
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where fEmbeddedMap containskey 'key12'"))
+ .execute();
- @Test
- public void testEmbeddedMapBySpecificKeyIndexReuse() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ Assert.assertEquals(result.size(), 10);
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
+ final ODocument document = new ODocument();
- final List<ODocument> result = database
- .command(
- new OSQLSynchQuery<ODocument>(
- "select * from sqlSelectIndexReuseTestClass where ( fEmbeddedMap containskey 'key12' ) and ( fEmbeddedMap['key12'] = 12 )"))
- .execute();
+ final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
- Assert.assertEquals(result.size(), 10);
+ embeddedMap.put("key11", 11);
+ embeddedMap.put("key12", 12);
+ embeddedMap.put("key13", 13);
+ embeddedMap.put("key14", 11);
- final ODocument document = new ODocument();
+ document.field("fEmbeddedMap", embeddedMap);
- final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
+ Assert.assertEquals(containsDocument(result, document), 10);
- embeddedMap.put("key11", 11);
- embeddedMap.put("key12", 12);
- embeddedMap.put("key13", 13);
- embeddedMap.put("key14", 11);
-
- document.field("fEmbeddedMap", embeddedMap);
-
- Assert.assertEquals(containsDocument(result, document), 10);
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2);
- }
-
- @Test
- public void testEmbeddedMapByValueIndexReuse() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2);
+ }
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
+ @Test
+ public void testEmbeddedMapBySpecificKeyIndexReuse() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where fEmbeddedMap containsvalue 11")).execute();
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
- Assert.assertEquals(result.size(), 10);
+ final List<ODocument> result = database
+ .command(
+ new OSQLSynchQuery<ODocument>(
+ "select * from sqlSelectIndexReuseTestClass where ( fEmbeddedMap containskey 'key12' ) and ( fEmbeddedMap['key12'] = 12 )"))
+ .execute();
- final ODocument document = new ODocument();
+ Assert.assertEquals(result.size(), 10);
- final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
+ final ODocument document = new ODocument();
- embeddedMap.put("key11", 11);
- embeddedMap.put("key12", 12);
- embeddedMap.put("key13", 13);
- embeddedMap.put("key14", 11);
-
- document.field("fEmbeddedMap", embeddedMap);
-
- Assert.assertEquals(containsDocument(result, document), 10);
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2);
- }
-
- @Test
- public void testEmbeddedListIndexReuse() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where fEmbeddedList contains 7")).execute();
-
- final List<Integer> embeddedList = new ArrayList<Integer>(3);
- embeddedList.add(6);
- embeddedList.add(7);
- embeddedList.add(8);
-
- final ODocument document = new ODocument();
- document.field("fEmbeddedList", embeddedList);
-
- Assert.assertEquals(containsDocument(result, document), 10);
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2);
- }
-
- @Test
- public void testNotIndexOperatorFirstCase() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>(
- "select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 = 2 and ( prop4 = 3 or prop4 = 1 )")).execute();
-
- Assert.assertEquals(result.size(), 1);
-
- final ODocument document = result.get(0);
- Assert.assertEquals(document.<Integer>field("prop1").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop2").intValue(), 2);
- Assert.assertEquals(document.<Integer>field("prop4").intValue(), 1);
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
-
- @Test
- public void testNotIndexOperatorSecondCase() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>(
- "select * from sqlSelectIndexReuseTestClass where ( prop1 = 1 and prop2 = 2 ) or ( prop4 = 1 and prop6 = 2 )"))
- .execute();
-
- Assert.assertEquals(result.size(), 1);
-
- final ODocument document = result.get(0);
- Assert.assertEquals(document.<Integer>field("prop1").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop2").intValue(), 2);
- Assert.assertEquals(document.<Integer>field("prop4").intValue(), 1);
- Assert.assertEquals(document.<Integer>field("prop6").intValue(), 2);
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
- }
-
- private int containsDocument(final List<ODocument> docList, final ODocument document) {
- int count = 0;
- for (final ODocument docItem : docList) {
- boolean containsAllFields = true;
- for (final String fieldName : document.fieldNames()) {
- if (!document.<Object>field(fieldName).equals(docItem.<Object>field(fieldName))) {
- containsAllFields = false;
- break;
- }
- }
- if (containsAllFields) {
- count++;
- }
- }
- return count;
- }
-
- @Test
- public void testCompositeIndexEmptyResult() {
- long oldIndexUsage = profiler.getCounter("Query.indexUsage");
- long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
- long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
-
- if (oldIndexUsage == -1) {
- oldIndexUsage = 0;
- }
- if (oldCompositeIndexUsage == -1) {
- oldCompositeIndexUsage = 0;
- }
- if (oldCompositeIndexUsage2 == -1) {
- oldCompositeIndexUsage2 = 0;
- }
-
- final List<ODocument> result = database.command(
- new OSQLSynchQuery<ODocument>(
- "select * from sqlSelectIndexReuseTestClass where prop1 = 1777 and prop2 = 2777")).execute();
-
- Assert.assertEquals(result.size(), 0);
-
- Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
- Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
- }
+ final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
+
+ embeddedMap.put("key11", 11);
+ embeddedMap.put("key12", 12);
+ embeddedMap.put("key13", 13);
+ embeddedMap.put("key14", 11);
+
+ document.field("fEmbeddedMap", embeddedMap);
+
+ Assert.assertEquals(containsDocument(result, document), 10);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2);
+ }
+
+ @Test
+ public void testEmbeddedMapByValueIndexReuse() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where fEmbeddedMap containsvalue 11")).execute();
+
+ Assert.assertEquals(result.size(), 10);
+
+ final ODocument document = new ODocument();
+
+ final Map<String, Integer> embeddedMap = new HashMap<String, Integer>();
+
+ embeddedMap.put("key11", 11);
+ embeddedMap.put("key12", 12);
+ embeddedMap.put("key13", 13);
+ embeddedMap.put("key14", 11);
+
+ document.field("fEmbeddedMap", embeddedMap);
+
+ Assert.assertEquals(containsDocument(result, document), 10);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2);
+ }
+
+ @Test
+ public void testEmbeddedListIndexReuse() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where fEmbeddedList contains 7")).execute();
+
+ final List<Integer> embeddedList = new ArrayList<Integer>(3);
+ embeddedList.add(6);
+ embeddedList.add(7);
+ embeddedList.add(8);
+
+ final ODocument document = new ODocument();
+ document.field("fEmbeddedList", embeddedList);
+
+ Assert.assertEquals(containsDocument(result, document), 10);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2);
+ }
+
+ @Test
+ public void testNotIndexOperatorFirstCase() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>(
+ "select * from sqlSelectIndexReuseTestClass where prop1 = 1 and prop2 = 2 and ( prop4 = 3 or prop4 = 1 )")).execute();
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = result.get(0);
+ Assert.assertEquals(document.<Integer> field("prop1").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop2").intValue(), 2);
+ Assert.assertEquals(document.<Integer> field("prop4").intValue(), 1);
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
+
+ @Test
+ public void testNotIndexOperatorSecondCase() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>(
+ "select * from sqlSelectIndexReuseTestClass where ( prop1 = 1 and prop2 = 2 ) or ( prop4 = 1 and prop6 = 2 )"))
+ .execute();
+
+ Assert.assertEquals(result.size(), 1);
+
+ final ODocument document = result.get(0);
+ Assert.assertEquals(document.<Integer> field("prop1").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop2").intValue(), 2);
+ Assert.assertEquals(document.<Integer> field("prop4").intValue(), 1);
+ Assert.assertEquals(document.<Integer> field("prop6").intValue(), 2);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage);
+ }
+
+ private int containsDocument(final List<ODocument> docList, final ODocument document) {
+ int count = 0;
+ for (final ODocument docItem : docList) {
+ boolean containsAllFields = true;
+ for (final String fieldName : document.fieldNames()) {
+ if (!document.<Object> field(fieldName).equals(docItem.<Object> field(fieldName))) {
+ containsAllFields = false;
+ break;
+ }
+ }
+ if (containsAllFields) {
+ count++;
+ }
+ }
+ return count;
+ }
+
+ @Test
+ public void testCompositeIndexEmptyResult() {
+ long oldIndexUsage = profiler.getCounter("Query.indexUsage");
+ long oldCompositeIndexUsage = profiler.getCounter("Query.compositeIndexUsage");
+ long oldCompositeIndexUsage2 = profiler.getCounter("Query.compositeIndexUsage.2");
+
+ if (oldIndexUsage == -1) {
+ oldIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage == -1) {
+ oldCompositeIndexUsage = 0;
+ }
+ if (oldCompositeIndexUsage2 == -1) {
+ oldCompositeIndexUsage2 = 0;
+ }
+
+ final List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>("select * from sqlSelectIndexReuseTestClass where prop1 = 1777 and prop2 = 2777")).execute();
+
+ Assert.assertEquals(result.size(), 0);
+
+ Assert.assertEquals(profiler.getCounter("Query.indexUsage"), oldIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage"), oldCompositeIndexUsage + 1);
+ Assert.assertEquals(profiler.getCounter("Query.compositeIndexUsage.2"), oldCompositeIndexUsage2 + 1);
+ }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.