commit_id
stringlengths 40
40
| project
stringclasses 11
values | commit_message
stringlengths 3
3.04k
| type
stringclasses 3
values | url
stringclasses 11
values | git_diff
stringlengths 555
691k
|
|---|---|---|---|---|---|
12060cb9a634e3b1ae0b7d1508bedd37e3dc0394
|
hadoop
|
YARN-3028. Better syntax for replaceLabelsOnNode in- RMAdmin CLI. Contributed by Rohith Sharmaks--(cherry picked from commit fd93e5387b554a78413bc0f14b729e58fea604ea)-
|
p
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 5422c0eb26dbd..af6a01509c6dd 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -178,6 +178,9 @@ Release 2.7.0 - UNRELEASED
YARN-2897. CrossOriginFilter needs more log statements (Mit Desai via
jeagles)
+ YARN-3028. Better syntax for replaceLabelsOnNode in RMAdmin CLI
+ (Rohith Sharmaks via wangda)
+
OPTIMIZATIONS
BUG FIXES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java
index 9ea333cab4cba..6f1bbd09d83bf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java
@@ -100,7 +100,8 @@ public class RMAdminCLI extends HAAdmin {
new UsageInfo("[label1,label2,label3] (label splitted by \",\")",
"remove from cluster node labels"))
.put("-replaceLabelsOnNode",
- new UsageInfo("[node1:port,label1,label2 node2:port,label1,label2]",
+ new UsageInfo(
+ "[node1[:port]=label1,label2 node2[:port]=label1,label2]",
"replace labels on nodes"))
.put("-directlyAccessNodeLabelStore",
new UsageInfo("", "Directly access node label store, "
@@ -199,7 +200,7 @@ private static void printHelp(String cmd, boolean isHAEnabled) {
" [-getGroup [username]]" +
" [[-addToClusterNodeLabels [label1,label2,label3]]" +
" [-removeFromClusterNodeLabels [label1,label2,label3]]" +
- " [-replaceLabelsOnNode [node1:port,label1,label2 node2:port,label1]" +
+ " [-replaceLabelsOnNode [node1[:port]=label1,label2 node2[:port]=label1]" +
" [-directlyAccessNodeLabelStore]]");
if (isHAEnabled) {
appendHAUsage(summary);
@@ -398,8 +399,18 @@ private Map<NodeId, Set<String>> buildNodeLabelsMapFromStr(String args)
continue;
}
- String[] splits = nodeToLabels.split(",");
+ // "," also supported for compatibility
+ String[] splits = nodeToLabels.split("=");
+ int index = 0;
+ if (splits.length != 2) {
+ splits = nodeToLabels.split(",");
+ index = 1;
+ }
+
String nodeIdStr = splits[0];
+ if (index == 0) {
+ splits = splits[1].split(",");
+ }
if (nodeIdStr.trim().isEmpty()) {
throw new IOException("node name cannot be empty");
@@ -408,7 +419,7 @@ private Map<NodeId, Set<String>> buildNodeLabelsMapFromStr(String args)
NodeId nodeId = ConverterUtils.toNodeIdWithDefaultPort(nodeIdStr);
map.put(nodeId, new HashSet<String>());
- for (int i = 1; i < splits.length; i++) {
+ for (int i = index; i < splits.length; i++) {
if (!splits[i].trim().isEmpty()) {
map.get(nodeId).add(splits[i].trim());
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java
index 92af27dc692ca..1dfeac21d4425 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java
@@ -73,7 +73,6 @@ public class TestRMAdminCLI {
@Before
public void configure() throws IOException, YarnException {
remoteAdminServiceAccessed = false;
- dummyNodeLabelsManager = new DummyCommonNodeLabelsManager();
admin = mock(ResourceManagerAdministrationProtocol.class);
when(admin.addToClusterNodeLabels(any(AddToClusterNodeLabelsRequest.class)))
.thenAnswer(new Answer<AddToClusterNodeLabelsResponse>() {
@@ -105,6 +104,7 @@ protected HAServiceTarget resolveTarget(String rmId) {
return haServiceTarget;
}
};
+ initDummyNodeLabelsManager();
rmAdminCLI.localNodeLabelsManager = dummyNodeLabelsManager;
YarnConfiguration conf = new YarnConfiguration();
@@ -124,6 +124,13 @@ protected HAServiceTarget resolveTarget(String rmId) {
};
}
+ private void initDummyNodeLabelsManager() {
+ Configuration conf = new YarnConfiguration();
+ conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true);
+ dummyNodeLabelsManager = new DummyCommonNodeLabelsManager();
+ dummyNodeLabelsManager.init(conf);
+ }
+
@Test(timeout=500)
public void testRefreshQueues() throws Exception {
String[] args = { "-refreshQueues" };
@@ -281,7 +288,7 @@ public void testHelp() throws Exception {
"[-refreshAdminAcls] [-refreshServiceAcl] [-getGroup" +
" [username]] [[-addToClusterNodeLabels [label1,label2,label3]]" +
" [-removeFromClusterNodeLabels [label1,label2,label3]] [-replaceLabelsOnNode " +
- "[node1:port,label1,label2 node2:port,label1] [-directlyAccessNodeLabelStore]] " +
+ "[node1[:port]=label1,label2 node2[:port]=label1] [-directlyAccessNodeLabelStore]] " +
"[-help [cmd]]"));
assertTrue(dataOut
.toString()
@@ -361,7 +368,7 @@ public void testHelp() throws Exception {
"[-refreshAdminAcls] [-refreshServiceAcl] [-getGroup" +
" [username]] [[-addToClusterNodeLabels [label1,label2,label3]]" +
" [-removeFromClusterNodeLabels [label1,label2,label3]] [-replaceLabelsOnNode " +
- "[node1:port,label1,label2 node2:port,label1] [-directlyAccessNodeLabelStore]] " +
+ "[node1[:port]=label1,label2 node2[:port]=label1] [-directlyAccessNodeLabelStore]] " +
"[-transitionToActive [--forceactive] <serviceId>] " +
"[-transitionToStandby <serviceId>] [-failover" +
" [--forcefence] [--forceactive] <serviceId> <serviceId>] " +
@@ -501,24 +508,29 @@ public void testRemoveFromClusterNodeLabels() throws Exception {
@Test
public void testReplaceLabelsOnNode() throws Exception {
// Successfully replace labels
- dummyNodeLabelsManager.addToCluserNodeLabels(ImmutableSet.of("x", "Y"));
+ dummyNodeLabelsManager
+ .addToCluserNodeLabels(ImmutableSet.of("x", "y", "Y"));
String[] args =
- { "-replaceLabelsOnNode", "node1,x,Y node2,Y",
+ { "-replaceLabelsOnNode",
+ "node1:8000,x,y node2:8000=y node3,x,Y node4=Y",
"-directlyAccessNodeLabelStore" };
assertEquals(0, rmAdminCLI.run(args));
assertTrue(dummyNodeLabelsManager.getNodeLabels().containsKey(
- NodeId.newInstance("node1", 0)));
+ NodeId.newInstance("node1", 8000)));
assertTrue(dummyNodeLabelsManager.getNodeLabels().containsKey(
- NodeId.newInstance("node2", 0)));
-
+ NodeId.newInstance("node2", 8000)));
+ assertTrue(dummyNodeLabelsManager.getNodeLabels().containsKey(
+ NodeId.newInstance("node3", 0)));
+ assertTrue(dummyNodeLabelsManager.getNodeLabels().containsKey(
+ NodeId.newInstance("node4", 0)));
+
// no labels, should fail
args = new String[] { "-replaceLabelsOnNode" };
assertTrue(0 != rmAdminCLI.run(args));
-
+
// no labels, should fail
args =
- new String[] { "-replaceLabelsOnNode",
- "-directlyAccessNodeLabelStore" };
+ new String[] { "-replaceLabelsOnNode", "-directlyAccessNodeLabelStore" };
assertTrue(0 != rmAdminCLI.run(args));
// no labels, should fail
@@ -529,20 +541,6 @@ public void testReplaceLabelsOnNode() throws Exception {
assertTrue(0 != rmAdminCLI.run(args));
}
- @Test
- public void testReplaceLabelsOnNodeWithPort() throws Exception {
- // Successfully replace labels
- dummyNodeLabelsManager.addToCluserNodeLabels(ImmutableSet.of("x", "y"));
- String[] args =
- { "-replaceLabelsOnNode", "node1:8000,x,y node2:8000,y",
- "-directlyAccessNodeLabelStore" };
- assertEquals(0, rmAdminCLI.run(args));
- assertTrue(dummyNodeLabelsManager.getNodeLabels().containsKey(
- NodeId.newInstance("node1", 8000)));
- assertTrue(dummyNodeLabelsManager.getNodeLabels().containsKey(
- NodeId.newInstance("node2", 8000)));
- }
-
private void testError(String[] args, String template,
ByteArrayOutputStream data, int resultCode) throws Exception {
int actualResultCode = rmAdminCLI.run(args);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java
index 242f59caf2271..0ab1115491838 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java
@@ -333,23 +333,32 @@ private void assertNodeLabelsDisabledErrorMessage(IOException e) {
public void testNodeLabelsDisabled() throws IOException {
DummyCommonNodeLabelsManager mgr = new DummyCommonNodeLabelsManager();
Configuration conf = new YarnConfiguration();
- conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true);
+ conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, false);
mgr.init(conf);
mgr.start();
+ boolean caught = false;
// add labels
try {
mgr.addToCluserNodeLabels(ImmutableSet.of("x"));
} catch (IOException e) {
assertNodeLabelsDisabledErrorMessage(e);
+ caught = true;
}
+ // check exception caught
+ Assert.assertTrue(caught);
+ caught = false;
// remove labels
try {
mgr.removeFromClusterNodeLabels(ImmutableSet.of("x"));
} catch (IOException e) {
assertNodeLabelsDisabledErrorMessage(e);
+ caught = true;
}
+ // check exception caught
+ Assert.assertTrue(caught);
+ caught = false;
// add labels to node
try {
@@ -357,7 +366,11 @@ public void testNodeLabelsDisabled() throws IOException {
CommonNodeLabelsManager.EMPTY_STRING_SET));
} catch (IOException e) {
assertNodeLabelsDisabledErrorMessage(e);
+ caught = true;
}
+ // check exception caught
+ Assert.assertTrue(caught);
+ caught = false;
// remove labels from node
try {
@@ -365,7 +378,11 @@ public void testNodeLabelsDisabled() throws IOException {
CommonNodeLabelsManager.EMPTY_STRING_SET));
} catch (IOException e) {
assertNodeLabelsDisabledErrorMessage(e);
+ caught = true;
}
+ // check exception caught
+ Assert.assertTrue(caught);
+ caught = false;
// replace labels on node
try {
@@ -373,7 +390,11 @@ public void testNodeLabelsDisabled() throws IOException {
CommonNodeLabelsManager.EMPTY_STRING_SET));
} catch (IOException e) {
assertNodeLabelsDisabledErrorMessage(e);
+ caught = true;
}
+ // check exception caught
+ Assert.assertTrue(caught);
+ caught = false;
mgr.close();
}
|
46ccb06f43fa17fd6e60aea15747f34f70516107
|
restlet-framework-java
|
- Renamed the "org.restlet.ssl.hostnameVerifier"- attribute into just "hostnameVerifier" for consistency with existing naming.--
|
p
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/build/tmpl/text/changes.txt b/build/tmpl/text/changes.txt
index 58afd0717c..79a5dfc320 100644
--- a/build/tmpl/text/changes.txt
+++ b/build/tmpl/text/changes.txt
@@ -89,9 +89,9 @@ Changes log
Sponsored by NetDev Ltd (http://www.netdev.co.uk).
- The Apache HTTP Client extension can now have a retry handler
parameter set. Suggested by Sanjay Acharya.
- - Added a "sslContextFactory" parameter to HTTPS connector to
- improve the flexibility of SSL configuration. If this
- parameter isn't set, the previous parameters are used
+ - Added a "sslContextFactory" parameter and attribute to HTTPS
+ connectors to improve the flexibility of SSL configuration. If
+ this parameter isn't set, the previous parameters are used
instead. Contributed by Bruno Harbulot.
- Exceptions occuring while writing the response entities are
now reported as severe errors instead of just info messages.
@@ -103,7 +103,9 @@ Changes log
- Added a port of Restlet to GWT. This is a subset of the
Restlet API and Engine, mostly the client side working on top
of GWT 1.5 RC1.
- - Slightly refactored and documented OAuth extension.
+ - Slightly refactored and documented OAuth extension.
+ - Renamed the "org.restlet.ssl.hostnameVerifier" attribute into
+ just "hostnameVerifier" for consistency with existing naming.
- Misc
- Refactored WAR client to remove remaining class in NRE core.
diff --git a/modules/com.noelios.restlet.ext.net/src/com/noelios/restlet/ext/net/HttpClientHelper.java b/modules/com.noelios.restlet.ext.net/src/com/noelios/restlet/ext/net/HttpClientHelper.java
index 4e79910eb7..7fe2d71913 100644
--- a/modules/com.noelios.restlet.ext.net/src/com/noelios/restlet/ext/net/HttpClientHelper.java
+++ b/modules/com.noelios.restlet.ext.net/src/com/noelios/restlet/ext/net/HttpClientHelper.java
@@ -161,13 +161,13 @@ public int getConnectTimeout() {
/**
* Returns the hostname verifier by looking up the
- * "org.restlet.ssl.hostnameVerifier" attribute of the client's context.
+ * "hostnameVerifier" attribute of the client's context.
*
* @return The hostname verifier or null.
*/
public HostnameVerifier getHostnameVerifier() {
return (HostnameVerifier) getAttributes().get(
- "org.restlet.ssl.hostnameVerifier");
+ "hostnameVerifier");
}
/**
diff --git a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/util/ExceptionHandler.java b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/util/ExceptionHandler.java
index 4a02c71dab..cd139ec056 100644
--- a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/util/ExceptionHandler.java
+++ b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/util/ExceptionHandler.java
@@ -211,7 +211,7 @@ public RequestHandledException missingAnnotation(
* @return staticly to throw, if needed by compiler.
*/
public WebApplicationException noMessageBodyWriter(Class<?> entityClass,
- Type genericType, @SuppressWarnings("unused") Annotation[] annotations) {
+ Type genericType, Annotation[] annotations) {
this.logger.warning("No message body writer found for class "
+ entityClass + ", genericType " + genericType);
// LATER log also annotations
diff --git a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/ResourceClass.java b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/ResourceClass.java
index 60fe5d36c5..8450f5af99 100644
--- a/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/ResourceClass.java
+++ b/modules/org.restlet.ext.jaxrs_0.9/src/org/restlet/ext/jaxrs/internal/wrappers/ResourceClass.java
@@ -144,7 +144,7 @@ public class ResourceClass extends AbstractJaxRsWrapper {
protected ResourceClass(Class<?> jaxRsClass,
ThreadLocalizedContext tlContext, JaxRsProviders jaxRsProviders,
ExtensionBackwardMapping extensionBackwardMapping,
- Logger logger, @SuppressWarnings("unused") Logger sameLogger) throws IllegalArgumentException,
+ Logger logger, Logger sameLogger) throws IllegalArgumentException,
IllegalPathOnClassException, MissingAnnotationException {
super(PathRegExp.createForClass(jaxRsClass));
this.leaveEncoded = jaxRsClass.isAnnotationPresent(Encoded.class);
|
12585ca80029fbd7b024d4c47b05f71dbdb2f70c
|
intellij-community
|
FileNameCache: enable caching always don't- store common prefix (saves 8 entry bytes vs. less prefix average length)--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/platform-impl/src/com/intellij/openapi/vfs/newvfs/impl/FileNameCache.java b/platform/platform-impl/src/com/intellij/openapi/vfs/newvfs/impl/FileNameCache.java
index 487975953cb77..eddb684291896 100644
--- a/platform/platform-impl/src/com/intellij/openapi/vfs/newvfs/impl/FileNameCache.java
+++ b/platform/platform-impl/src/com/intellij/openapi/vfs/newvfs/impl/FileNameCache.java
@@ -33,8 +33,7 @@
public class FileNameCache {
private static final PersistentStringEnumerator ourNames = FSRecords.getNames();
@NonNls private static final String EMPTY = "";
- @NonNls private static final String[] WELL_KNOWN_SUFFIXES = {EMPTY, "$1.class", "$2.class","Test.java","List.java","tion.java", ".class", ".java", ".html", ".txt", ".xml",".php",".gif",".svn",".css",".js"};
- private static final IntSLRUCache<NameSuffixEntry> ourNameCache = new IntSLRUCache<NameSuffixEntry>(40000, 20000);
+ private static final IntSLRUCache<IntObjectLinkedMap.MapEntry<Object>> ourNameCache = new IntSLRUCache<IntObjectLinkedMap.MapEntry<Object>>(40000, 20000);
static int storeName(@NotNull String name) {
final int idx = FSRecords.getNameId(name);
@@ -43,36 +42,17 @@ static int storeName(@NotNull String name) {
}
@NotNull
- private static NameSuffixEntry cacheData(String name, int id) {
+ private static IntObjectLinkedMap.MapEntry<Object> cacheData(String name, int id) {
if (name == null) {
ourNames.markCorrupted();
throw new RuntimeException("VFS name enumerator corrupted");
}
- byte suffixId = findSuffix(name);
- Object rawName = convertToBytesIfAsciiString(suffixId == 0 ? name : name.substring(0, name.length() -
- WELL_KNOWN_SUFFIXES[suffixId].length()));
- NameSuffixEntry entry = new NameSuffixEntry(id, suffixId, rawName);
- if (shouldUseCache()) {
- synchronized (ourNameCache) {
- entry = ourNameCache.cacheEntry(entry);
- }
+ Object rawName = convertToBytesIfAsciiString(name);
+ IntObjectLinkedMap.MapEntry<Object> entry = new IntObjectLinkedMap.MapEntry<Object>(id, rawName);
+ synchronized (ourNameCache) {
+ return ourNameCache.cacheEntry(entry);
}
- return entry;
- }
-
- private static boolean shouldUseCache() {
- return true;
- }
-
- private static byte findSuffix(String name) {
- for (byte i = 1; i < WELL_KNOWN_SUFFIXES.length; i++) {
- String suffix = WELL_KNOWN_SUFFIXES[i];
- if (name.endsWith(suffix)) {
- return i;
- }
- }
- return 0;
}
private static Object convertToBytesIfAsciiString(@NotNull String name) {
@@ -91,13 +71,11 @@ private static Object convertToBytesIfAsciiString(@NotNull String name) {
}
@NotNull
- private static NameSuffixEntry getEntry(int id) {
- if (shouldUseCache()) {
- synchronized (ourNameCache) {
- NameSuffixEntry entry = ourNameCache.getCachedEntry(id);
- if (entry != null) {
- return entry;
- }
+ private static IntObjectLinkedMap.MapEntry<Object> getEntry(int id) {
+ synchronized (ourNameCache) {
+ IntObjectLinkedMap.MapEntry<Object> entry = ourNameCache.getCachedEntry(id);
+ if (entry != null) {
+ return entry;
}
}
@@ -106,27 +84,25 @@ private static NameSuffixEntry getEntry(int id) {
@NotNull
static String getVFileName(int nameId) {
- NameSuffixEntry entry = getEntry(nameId);
- Object name = entry.getRawName();
- String suffix = entry.getSuffix();
+ IntObjectLinkedMap.MapEntry<Object> entry = getEntry(nameId);
+ Object name = entry.value;
if (name instanceof String) {
//noinspection StringEquality
- return suffix == EMPTY ? (String)name : name + suffix;
+ return (String)name;
}
byte[] bytes = (byte[])name;
int length = bytes.length;
- char[] chars = new char[length + suffix.length()];
+ char[] chars = new char[length];
for (int i = 0; i < length; i++) {
chars[i] = (char)bytes[i];
}
- VirtualFileSystemEntry.copyString(chars, length, suffix);
return StringFactory.createShared(chars);
}
static int compareNameTo(int nameId, @NotNull String name, boolean ignoreCase) {
- NameSuffixEntry entry = getEntry(nameId);
- Object rawName = entry.getRawName();
+ IntObjectLinkedMap.MapEntry<Object> entry = getEntry(nameId);
+ Object rawName = entry.value;
if (rawName instanceof String) {
String thisName = getVFileName(nameId);
return VirtualFileSystemEntry.compareNames(thisName, name, ignoreCase);
@@ -135,17 +111,10 @@ static int compareNameTo(int nameId, @NotNull String name, boolean ignoreCase) {
byte[] bytes = (byte[])rawName;
int bytesLength = bytes.length;
- String suffix = entry.getSuffix();
- int suffixLength = suffix.length();
-
- int d = bytesLength + suffixLength - name.length();
- if (d != 0) return d;
-
- d = compareBytes(bytes, 0, name, 0, bytesLength, ignoreCase);
+ int d = bytesLength - name.length();
if (d != 0) return d;
- d = VirtualFileSystemEntry.compareNames(suffix, name, ignoreCase, bytesLength);
- return d;
+ return compareBytes(bytes, 0, name, 0, bytesLength, ignoreCase);
}
private static int compareBytes(@NotNull byte[] name1, int offset1, @NotNull String name2, int offset2, int len, boolean ignoreCase) {
@@ -159,12 +128,10 @@ private static int compareBytes(@NotNull byte[] name1, int offset1, @NotNull Str
}
static char[] appendPathOnFileSystem(int nameId, @Nullable VirtualFileSystemEntry parent, int accumulatedPathLength, int[] positionRef) {
- NameSuffixEntry entry = getEntry(nameId);
- Object o = entry.getRawName();
- String suffix = entry.getSuffix();
- int rawNameLength = o instanceof String ? ((String)o).length() : ((byte[])o).length;
- int nameLength = rawNameLength + suffix.length();
- boolean appendSlash = SystemInfo.isWindows && parent == null && suffix.isEmpty() && rawNameLength == 2 &&
+ IntObjectLinkedMap.MapEntry<Object> entry = getEntry(nameId);
+ Object o = entry.value;
+ int nameLength = o instanceof String ? ((String)o).length() : ((byte[])o).length;
+ boolean appendSlash = SystemInfo.isWindows && parent == null && nameLength == 2 &&
(o instanceof String ? ((String)o).charAt(1) : (char)((byte[])o)[1]) == ':';
char[] chars;
@@ -196,29 +163,8 @@ static char[] appendPathOnFileSystem(int nameId, @Nullable VirtualFileSystemEntr
if (appendSlash) {
chars[positionRef[0]++] = '/';
}
- else {
- positionRef[0] = VirtualFileSystemEntry.copyString(chars, positionRef[0], suffix);
- }
return chars;
}
- private static class NameSuffixEntry extends IntObjectLinkedMap.MapEntry<Object> {
- final byte suffixId;
-
- private NameSuffixEntry(int nameId, byte suffixId, Object rawName) {
- super(nameId, rawName);
- this.suffixId = suffixId;
- }
-
- Object getRawName() {
- return value;
- }
-
- public String getSuffix() {
- return WELL_KNOWN_SUFFIXES[suffixId];
- }
- }
-
-
}
|
68f61f3b3c2efaa263190519be6ebf4a02e021ad
|
spring-framework
|
Fix nested @Component annotation instantiation- bug--3.1 M2 introduced a regression that causes false positives during-@Configuration class candidate checks. Now performing a call to-AnnotationMetadata-isInterface in addition to checks for @Component and-@Bean annotations when determining whether a candidate is a 'lite'-configuration class. Annotations are in the end interfaces, so both-are filtered out at once.--Issue: SPR-8761-
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.context/src/main/java/org/springframework/context/annotation/ConfigurationClassUtils.java b/org.springframework.context/src/main/java/org/springframework/context/annotation/ConfigurationClassUtils.java
index 2d192a809c94..5a909771056b 100644
--- a/org.springframework.context/src/main/java/org/springframework/context/annotation/ConfigurationClassUtils.java
+++ b/org.springframework.context/src/main/java/org/springframework/context/annotation/ConfigurationClassUtils.java
@@ -100,8 +100,9 @@ public static boolean isFullConfigurationCandidate(AnnotationMetadata metadata)
}
public static boolean isLiteConfigurationCandidate(AnnotationMetadata metadata) {
- return metadata.isAnnotated(Component.class.getName()) ||
- metadata.hasAnnotatedMethods(Bean.class.getName());
+ return !metadata.isInterface() && // not an interface or an annotation
+ (metadata.isAnnotated(Component.class.getName()) ||
+ metadata.hasAnnotatedMethods(Bean.class.getName()));
}
diff --git a/org.springframework.context/src/test/java/org/springframework/context/annotation/spr8761/Spr8761Tests.java b/org.springframework.context/src/test/java/org/springframework/context/annotation/spr8761/Spr8761Tests.java
new file mode 100644
index 000000000000..c37c22106cda
--- /dev/null
+++ b/org.springframework.context/src/test/java/org/springframework/context/annotation/spr8761/Spr8761Tests.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2002-2011 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.context.annotation.spr8761;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+import org.junit.Test;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+import org.springframework.stereotype.Component;
+
+/**
+ * Tests cornering the regression reported in SPR-8761.
+ *
+ * @author Chris Beams
+ */
+public class Spr8761Tests {
+
+ /**
+ * Prior to the fix for SPR-8761, this test threw because the nested MyComponent
+ * annotation was being falsely considered as a 'lite' Configuration class candidate.
+ */
+ @Test
+ public void repro() {
+ AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+ ctx.scan(getClass().getPackage().getName());
+ ctx.refresh();
+ assertThat(ctx.containsBean("withNestedAnnotation"), is(true));
+ }
+
+}
+
+@Component
+class WithNestedAnnotation {
+
+ @Retention(RetentionPolicy.RUNTIME)
+ @Component
+ public static @interface MyComponent {
+ }
+}
|
415be84340155fa0eb6086149a7de381a41ef480
|
drools
|
Fixing NPE--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@6187 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/reteoo/NotNode.java b/drools-core/src/main/java/org/drools/reteoo/NotNode.java
index 62544d106f5..55470f365c5 100644
--- a/drools-core/src/main/java/org/drools/reteoo/NotNode.java
+++ b/drools-core/src/main/java/org/drools/reteoo/NotNode.java
@@ -242,10 +242,12 @@ public void retractTuple(final ReteTuple leftTuple,
}
LinkedList list = leftTuple.getLinkedTuples();
- for ( LinkedListNode node = list.getFirst(); node != null; node = node.getNext() ) {
- ReteTuple tuple = (ReteTuple) ((LinkedListObjectWrapper) node).getObject();
- tuple.retractTuple( context,
- workingMemory );
+ if( list != null ) {
+ for ( LinkedListNode node = list.getFirst(); node != null; node = node.getNext() ) {
+ ReteTuple tuple = (ReteTuple) ((LinkedListObjectWrapper) node).getObject();
+ tuple.retractTuple( context,
+ workingMemory );
+ }
}
}
|
06b1f1ec2fc0bca8f4d6d5345cbf42777b4d89ff
|
kotlin
|
Smart completion: code improvements and- refactorings after code review--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/src/org/jetbrains/jet/plugin/completion/CompletionSession.java b/idea/src/org/jetbrains/jet/plugin/completion/CompletionSession.java
index c0ad01c314051..b6f90474e431d 100644
--- a/idea/src/org/jetbrains/jet/plugin/completion/CompletionSession.java
+++ b/idea/src/org/jetbrains/jet/plugin/completion/CompletionSession.java
@@ -16,6 +16,8 @@
package org.jetbrains.jet.plugin.completion;
+import com.google.common.base.Predicate;
+import com.google.common.collect.Collections2;
import com.intellij.codeInsight.completion.CompletionParameters;
import com.intellij.codeInsight.completion.CompletionResultSet;
import com.intellij.codeInsight.completion.CompletionType;
@@ -43,7 +45,6 @@
import org.jetbrains.jet.plugin.references.JetSimpleNameReference;
import java.util.Collection;
-import java.util.Collections;
class CompletionSession {
@Nullable
@@ -52,13 +53,6 @@ class CompletionSession {
private final JetCompletionResultSet jetResult;
private final JetSimpleNameReference jetReference;
- private final Condition<DeclarationDescriptor> descriptorFilter = new Condition<DeclarationDescriptor>() {
- @Override
- public boolean value(DeclarationDescriptor descriptor) {
- return isVisibleDescriptor(descriptor);
- }
- };
-
public CompletionSession(
@NotNull CompletionParameters parameters,
@NotNull CompletionResultSet result,
@@ -75,10 +69,17 @@ public CompletionSession(
inDescriptor = scope != null ? scope.getContainingDeclaration() : null;
+ Condition<DeclarationDescriptor> descriptorFilter = new Condition<DeclarationDescriptor>() {
+ @Override
+ public boolean value(DeclarationDescriptor descriptor) {
+ return isVisibleDescriptor(descriptor);
+ }
+ };
this.jetResult = new JetCompletionResultSet(
WeigherPackage.addJetSorting(result, parameters),
resolveSession,
- expressionBindingContext, descriptorFilter);
+ expressionBindingContext,
+ descriptorFilter);
}
public void completeForReference() {
@@ -134,20 +135,18 @@ public boolean value(DeclarationDescriptor descriptor) {
public void completeSmart() {
assert parameters.getCompletionType() == CompletionType.SMART;
- final SmartCompletionData data = CompletionPackage.buildSmartCompletionData(jetReference.getExpression(), getResolveSession(), new Function1<DeclarationDescriptor, Boolean>() {
+ Collection<DeclarationDescriptor> descriptors = TipsManager.getReferenceVariants(
+ jetReference.getExpression(), getExpressionBindingContext());
+ Function1<DeclarationDescriptor, Boolean> visibilityFilter = new Function1<DeclarationDescriptor, Boolean>() {
@Override
public Boolean invoke(DeclarationDescriptor descriptor) {
return isVisibleDescriptor(descriptor);
}
- });
- if (data != null) {
- addReferenceVariants(new Function1<DeclarationDescriptor, Iterable<LookupElement>>(){
- @Override
- public Iterable<LookupElement> invoke(DeclarationDescriptor descriptor) {
- return data.toElements(descriptor);
- }
- });
- for (LookupElement element : data.getAdditionalElements()) {
+ };
+ Collection<LookupElement> elements = CompletionPackage.buildSmartCompletionData(
+ jetReference.getExpression(), getResolveSession(), descriptors, visibilityFilter);
+ if (elements != null) {
+ for (LookupElement element : elements) {
jetResult.addElement(element);
}
}
@@ -244,33 +243,21 @@ private boolean shouldRunTopLevelCompletion() {
}
private boolean shouldRunExtensionsCompletion() {
- return !(parameters.getInvocationCount() <= 1 && jetResult.getResult().getPrefixMatcher().getPrefix().length() < 3);
+ return parameters.getInvocationCount() > 1 || jetResult.getResult().getPrefixMatcher().getPrefix().length() >= 3;
}
private void addReferenceVariants(@NotNull final Condition<DeclarationDescriptor> filterCondition) {
- addReferenceVariants(new Function1<DeclarationDescriptor, Iterable<LookupElement>>(){
- @Override
- public Iterable<LookupElement> invoke(DeclarationDescriptor descriptor) {
- return filterCondition.value(descriptor)
- ? Collections.singletonList(
- DescriptorLookupConverter.createLookupElement(getResolveSession(), getExpressionBindingContext(), descriptor))
- : Collections.<LookupElement>emptyList();
- }
- });
- }
-
- private void addReferenceVariants(@NotNull Function1<DeclarationDescriptor, Iterable<LookupElement>> filter) {
Collection<DeclarationDescriptor> descriptors = TipsManager.getReferenceVariants(
jetReference.getExpression(), getExpressionBindingContext());
- for (DeclarationDescriptor descriptor : descriptors) {
- if (descriptor != null && descriptorFilter.value(descriptor)) {
- Iterable<LookupElement> elements = filter.invoke(descriptor);
- for (LookupElement element : elements) {
- jetResult.addElement(element);
- }
+ Collection<DeclarationDescriptor> filterDescriptors = Collections2.filter(descriptors, new Predicate<DeclarationDescriptor>() {
+ @Override
+ public boolean apply(@Nullable DeclarationDescriptor descriptor) {
+ return descriptor != null && filterCondition.value(descriptor);
}
- }
+ });
+
+ jetResult.addAllElements(filterDescriptors);
}
private boolean isVisibleDescriptor(DeclarationDescriptor descriptor) {
diff --git a/idea/src/org/jetbrains/jet/plugin/completion/SmartCompletion.kt b/idea/src/org/jetbrains/jet/plugin/completion/SmartCompletion.kt
index 3927afd51c0c9..1df1c07aac818 100644
--- a/idea/src/org/jetbrains/jet/plugin/completion/SmartCompletion.kt
+++ b/idea/src/org/jetbrains/jet/plugin/completion/SmartCompletion.kt
@@ -34,11 +34,6 @@ import org.jetbrains.jet.lang.resolve.scopes.receivers.ReceiverValue
import com.intellij.lang.ASTNode
import org.jetbrains.jet.lang.resolve.scopes.JetScope
-trait SmartCompletionData{
- fun toElements(descriptor: DeclarationDescriptor): Iterable<LookupElement>
- val additionalElements: Iterable<LookupElement>
-}
-
enum class Tail {
COMMA
PARENTHESIS
@@ -48,7 +43,8 @@ data class ExpectedTypeInfo(val `type`: JetType, val tail: Tail?)
fun buildSmartCompletionData(expression: JetSimpleNameExpression,
resolveSession: ResolveSessionForBodies,
- visibilityFilter: (DeclarationDescriptor) -> Boolean): SmartCompletionData? {
+ referenceVariants: Iterable<DeclarationDescriptor>,
+ visibilityFilter: (DeclarationDescriptor) -> Boolean): Collection<LookupElement>? {
val parent = expression.getParent()
val expressionWithType: JetExpression;
val receiver: JetExpression?
@@ -67,104 +63,72 @@ fun buildSmartCompletionData(expression: JetSimpleNameExpression,
val expectedTypes = allExpectedTypes.filter { !it.`type`.isError() }
if (expectedTypes.isEmpty()) return null
- val itemsToSkip = calcItemsToSkip(expressionWithType, resolveSession)
-
- val additionalElements = ArrayList<LookupElement>()
-
- if (receiver == null) {
- additionalElements.addTypeInstantiationItems(expectedTypes, resolveSession, bindingContext)
+ val result = ArrayList<LookupElement>()
- additionalElements.addStaticMembers(expressionWithType, expectedTypes, resolveSession, bindingContext)
+ val typesOf: (DeclarationDescriptor) -> Iterable<JetType> = dataFlowToDescriptorTypes(expressionWithType, receiver, bindingContext)
- additionalElements.addThisItems(expressionWithType, expectedTypes, bindingContext)
- }
+ val itemsToSkip = calcItemsToSkip(expressionWithType, resolveSession)
- val dataFlowInfo = bindingContext[BindingContext.EXPRESSION_DATA_FLOW_INFO, expressionWithType]
- val (variableToTypes: Map<VariableDescriptor, Collection<JetType>>, notNullVariables: Set<VariableDescriptor>) = processDataFlowInfo(dataFlowInfo, receiver, bindingContext)
+ for (descriptor in referenceVariants) {
+ if (itemsToSkip.contains(descriptor)) continue
- fun typesOf(descriptor: DeclarationDescriptor): Iterable<JetType> {
- if (descriptor is CallableDescriptor) {
- var returnType = descriptor.getReturnType()
- if (returnType != null && KotlinBuiltIns.getInstance().isNothing(returnType!!)) { //TODO: maybe we should include them on the second press?
- return listOf()
+ run {
+ val matchedExpectedTypes = expectedTypes.filter { expectedType ->
+ typesOf(descriptor).any { descriptorType -> isSubtypeOf(descriptorType, expectedType.`type`) }
}
- if (descriptor is VariableDescriptor) {
- if (notNullVariables.contains(descriptor) && returnType != null) {
- returnType = TypeUtils.makeNotNullable(returnType!!)
- }
-
- val autoCastTypes = variableToTypes[descriptor]
- if (autoCastTypes != null && !autoCastTypes.isEmpty()) {
- return autoCastTypes + returnType.toList()
- }
+ if (matchedExpectedTypes.isNotEmpty()) {
+ val lookupElement = DescriptorLookupConverter.createLookupElement(resolveSession, bindingContext, descriptor)
+ result.add(addTailToLookupElement(lookupElement, matchedExpectedTypes))
}
- return returnType.toList()
- }
- else if (descriptor is ClassDescriptor && descriptor.getKind() == ClassKind.ENUM_ENTRY) {
- return listOf(descriptor.getDefaultType())
}
- else {
- return listOf()
- }
- }
- return object: SmartCompletionData {
- override fun toElements(descriptor: DeclarationDescriptor): Iterable<LookupElement> {
- if (itemsToSkip.contains(descriptor)) return listOf()
-
- val result = ArrayList<LookupElement>()
-
- run {
- val matchedExpectedTypes = expectedTypes.filter { expectedType -> typesOf(descriptor).any { descriptorType -> isSubtypeOf(descriptorType, expectedType.`type`) } }
- if (matchedExpectedTypes.isNotEmpty()) {
- val tail = mergeTails(matchedExpectedTypes.map { it.tail })
- result.add(addTailToLookupElement(DescriptorLookupConverter.createLookupElement(resolveSession, bindingContext, descriptor), tail))
- }
- }
-
- val functionExpectedTypes = expectedTypes.filter { KotlinBuiltIns.getInstance().isExactFunctionOrExtensionFunctionType(it.`type`) }
- if (functionExpectedTypes.isNotEmpty()) {
- fun functionReferenceLookupElement(descriptor: FunctionDescriptor): LookupElement? {
- val functionType = functionType(descriptor)
- if (functionType == null) return null
-
- val matchedExpectedTypes = functionExpectedTypes.filter { isSubtypeOf(functionType, it.`type`) }
- if (matchedExpectedTypes.isEmpty()) return null
- val lookupElement = DescriptorLookupConverter.createLookupElement(resolveSession, bindingContext, descriptor)
- val text = "::" + (if (descriptor is ConstructorDescriptor) descriptor.getContainingDeclaration().getName() else descriptor.getName())
- val lookupElementDecorated = object: LookupElementDecorator<LookupElement>(lookupElement) {
- override fun getLookupString() = text
-
- override fun renderElement(presentation: LookupElementPresentation) {
- super.renderElement(presentation)
- presentation.setItemText(text)
- presentation.setTypeText("")
- }
-
- override fun handleInsert(context: InsertionContext) {
- }
+ val functionExpectedTypes = expectedTypes.filter { KotlinBuiltIns.getInstance().isExactFunctionOrExtensionFunctionType(it.`type`) }
+ if (functionExpectedTypes.isNotEmpty()) {
+ fun functionReferenceLookupElement(descriptor: FunctionDescriptor): LookupElement? {
+ val functionType = functionType(descriptor)
+ if (functionType == null) return null
+
+ val matchedExpectedTypes = functionExpectedTypes.filter { isSubtypeOf(functionType, it.`type`) }
+ if (matchedExpectedTypes.isEmpty()) return null
+ val lookupElement = DescriptorLookupConverter.createLookupElement(resolveSession, bindingContext, descriptor)
+ val text = "::" + (if (descriptor is ConstructorDescriptor) descriptor.getContainingDeclaration().getName() else descriptor.getName())
+ val lookupElementDecorated = object: LookupElementDecorator<LookupElement>(lookupElement) {
+ override fun getLookupString() = text
+
+ override fun renderElement(presentation: LookupElementPresentation) {
+ super.renderElement(presentation)
+ presentation.setItemText(text)
+ presentation.setTypeText("")
}
- val tail = mergeTails(matchedExpectedTypes.map { it.tail })
- return addTailToLookupElement(lookupElementDecorated, tail)
- }
-
- if (descriptor is SimpleFunctionDescriptor) {
- functionReferenceLookupElement(descriptor)?.let { result.add(it) }
- }
- else if (descriptor is ClassDescriptor && descriptor.getModality() != Modality.ABSTRACT) {
- val constructors = descriptor.getConstructors().filter(visibilityFilter)
- if (constructors.size == 1) { //TODO: this code is to be changed if overloads to start work after ::
- functionReferenceLookupElement(constructors.single())?.let { result.add(it) }
+ override fun handleInsert(context: InsertionContext) {
}
}
+
+ return addTailToLookupElement(lookupElementDecorated, matchedExpectedTypes)
}
- return result
+ if (descriptor is SimpleFunctionDescriptor) {
+ functionReferenceLookupElement(descriptor)?.let { result.add(it) }
+ }
+ else if (descriptor is ClassDescriptor && descriptor.getModality() != Modality.ABSTRACT) {
+ val constructors = descriptor.getConstructors().filter(visibilityFilter)
+ if (constructors.size == 1) { //TODO: this code is to be changed if overloads to start work after ::
+ functionReferenceLookupElement(constructors.single())?.let { result.add(it) }
+ }
+ }
}
+ }
+
+ if (receiver == null) {
+ result.addTypeInstantiationItems(expectedTypes, resolveSession, bindingContext)
+
+ result.addStaticMembers(expressionWithType, expectedTypes, resolveSession, bindingContext)
- override val additionalElements = additionalElements
+ result.addThisItems(expressionWithType, expectedTypes, bindingContext)
}
+
+ return result
}
private fun calcExpectedTypes(expressionWithType: JetExpression, bindingContext: BindingContext, moduleDescriptor: ModuleDescriptor): Collection<ExpectedTypeInfo>? {
@@ -364,8 +328,7 @@ private fun MutableCollection<LookupElement>.addThisItems(context: JetExpression
val qualifier = if (i == 0) null else thisQualifierName(receiver, bindingContext) ?: continue
val expressionText = if (qualifier == null) "this" else "this@" + qualifier
val lookupElement = LookupElementBuilder.create(expressionText).withTypeText(DescriptorRenderer.TEXT.renderType(thisType))
- val tailType = mergeTails(matchedExpectedTypes.map { it.tail })
- add(addTailToLookupElement(lookupElement, tailType))
+ add(addTailToLookupElement(lookupElement, matchedExpectedTypes))
}
}
}
@@ -388,6 +351,40 @@ private fun thisQualifierName(receiver: ReceiverParameterDescriptor, bindingCont
?.getReferencedName()
}
+private fun dataFlowToDescriptorTypes(expression: JetExpression, receiver: JetExpression?, bindingContext: BindingContext): (DeclarationDescriptor) -> Iterable<JetType> {
+ val dataFlowInfo = bindingContext[BindingContext.EXPRESSION_DATA_FLOW_INFO, expression]
+ val (variableToTypes: Map<VariableDescriptor, Collection<JetType>>, notNullVariables: Set<VariableDescriptor>)
+ = processDataFlowInfo(dataFlowInfo, receiver, bindingContext)
+
+ fun typesOf(descriptor: DeclarationDescriptor): Iterable<JetType> {
+ if (descriptor is CallableDescriptor) {
+ var returnType = descriptor.getReturnType()
+ if (returnType != null && KotlinBuiltIns.getInstance().isNothing(returnType!!)) { //TODO: maybe we should include them on the second press?
+ return listOf()
+ }
+ if (descriptor is VariableDescriptor) {
+ if (notNullVariables.contains(descriptor) && returnType != null) {
+ returnType = TypeUtils.makeNotNullable(returnType!!)
+ }
+
+ val autoCastTypes = variableToTypes[descriptor]
+ if (autoCastTypes != null && !autoCastTypes.isEmpty()) {
+ return autoCastTypes + returnType.toList()
+ }
+ }
+ return returnType.toList()
+ }
+ else if (descriptor is ClassDescriptor && descriptor.getKind() == ClassKind.ENUM_ENTRY) {
+ return listOf(descriptor.getDefaultType())
+ }
+ else {
+ return listOf()
+ }
+ }
+
+ return ::typesOf
+}
+
private data class ProcessDataFlowInfoResult(
val variableToTypes: Map<VariableDescriptor, Collection<JetType>> = Collections.emptyMap(),
val notNullVariables: Set<VariableDescriptor> = Collections.emptySet()
@@ -550,8 +547,7 @@ private fun MutableCollection<LookupElement>.addStaticMembers(classDescriptor: C
}
}
- val tail = mergeTails(descriptorExpectedTypes.map { it.tail })
- add(addTailToLookupElement(lookupElementDecorated, tail))
+ add(addTailToLookupElement(lookupElementDecorated, descriptorExpectedTypes))
}
}
@@ -578,6 +574,9 @@ private fun addTailToLookupElement(lookupElement: LookupElement, tail: Tail?): L
}
}
+private fun addTailToLookupElement(lookupElement: LookupElement, expectedTypes: Collection<ExpectedTypeInfo>): LookupElement
+ = addTailToLookupElement(lookupElement, mergeTails(expectedTypes.map { it.tail }))
+
private fun functionType(function: FunctionDescriptor): JetType? {
return KotlinBuiltIns.getInstance().getKFunctionType(function.getAnnotations(),
null,
diff --git a/idea/src/org/jetbrains/jet/plugin/completion/WithTailInsertHandler.kt b/idea/src/org/jetbrains/jet/plugin/completion/handlers/WithTailInsertHandler.kt
similarity index 69%
rename from idea/src/org/jetbrains/jet/plugin/completion/WithTailInsertHandler.kt
rename to idea/src/org/jetbrains/jet/plugin/completion/handlers/WithTailInsertHandler.kt
index 7986994773850..bfa508bf7cad7 100644
--- a/idea/src/org/jetbrains/jet/plugin/completion/WithTailInsertHandler.kt
+++ b/idea/src/org/jetbrains/jet/plugin/completion/handlers/WithTailInsertHandler.kt
@@ -1,9 +1,8 @@
-package org.jetbrains.jet.plugin.completion
+package org.jetbrains.jet.plugin.completion.handlers
-import com.intellij.codeInsight.lookup.*
import com.intellij.codeInsight.completion.*
-import com.intellij.openapi.editor.event.DocumentListener
-import com.intellij.openapi.editor.event.DocumentEvent
+import com.intellij.codeInsight.lookup.LookupElement
+import com.intellij.openapi.editor.event.*
import com.intellij.openapi.util.TextRange
import com.intellij.psi.PsiDocumentManager
@@ -29,18 +28,22 @@ class WithTailInsertHandler(val tailChar: Char, val spaceAfter: Boolean) : Inser
}
document.addDocumentListener(documentListener)
-
- item.handleInsert(context)
- PsiDocumentManager.getInstance(context.getProject()).doPostponedOperationsAndUnblockDocument(document)
-
- document.removeDocumentListener(documentListener)
+ try{
+ item.handleInsert(context)
+ PsiDocumentManager.getInstance(context.getProject()).doPostponedOperationsAndUnblockDocument(document)
+ }
+ finally {
+ document.removeDocumentListener(documentListener)
+ }
val moveCaret = caretModel.getOffset() == maxChangeOffset
- if (maxChangeOffset < document.getTextLength() && document.getText(TextRange(maxChangeOffset, maxChangeOffset + 1))[0] == tailChar) {
+ fun isCharAt(offset: Int, c: Char) = offset < document.getTextLength() && document.getText(TextRange(offset, offset + 1))[0] == c
+
+ if (isCharAt(maxChangeOffset, tailChar)) {
document.deleteString(maxChangeOffset, maxChangeOffset + 1)
- if (spaceAfter && maxChangeOffset < document.getTextLength() && document.getText(TextRange(maxChangeOffset, maxChangeOffset + 1)) == " ") {
+ if (spaceAfter && isCharAt(maxChangeOffset, ' ')) {
document.deleteString(maxChangeOffset, maxChangeOffset + 1)
}
}
|
44a35b5d9accc4ecf7b1bbf762e593540bafe6a3
|
hadoop
|
HADOOP-7353. Cleanup FsShell and prevent masking of- RTE stack traces. Contributed by Daryn Sharp.--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1132764 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hadoop
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 5e21b32de7e84..e2d5828c4aee6 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -280,6 +280,9 @@ Trunk (unreleased changes)
HADOOP-7341. Fix options parsing in CommandFormat (Daryn Sharp via todd)
+ HADOOP-7353. Cleanup FsShell and prevent masking of RTE stack traces.
+ (Daryn Sharp via todd)
+
Release 0.22.0 - Unreleased
INCOMPATIBLE CHANGES
diff --git a/src/java/org/apache/hadoop/fs/FsShell.java b/src/java/org/apache/hadoop/fs/FsShell.java
index b50f0b4a6d951..376ea79586b1e 100644
--- a/src/java/org/apache/hadoop/fs/FsShell.java
+++ b/src/java/org/apache/hadoop/fs/FsShell.java
@@ -18,9 +18,10 @@
package org.apache.hadoop.fs;
import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
import java.util.Arrays;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import java.util.LinkedList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -30,9 +31,6 @@
import org.apache.hadoop.fs.shell.Command;
import org.apache.hadoop.fs.shell.CommandFactory;
import org.apache.hadoop.fs.shell.FsCommand;
-import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
@@ -46,23 +44,31 @@ public class FsShell extends Configured implements Tool {
private Trash trash;
protected CommandFactory commandFactory;
+ private final String usagePrefix =
+ "Usage: hadoop fs [generic options]";
+
/**
+ * Default ctor with no configuration. Be sure to invoke
+ * {@link #setConf(Configuration)} with a valid configuration prior
+ * to running commands.
*/
public FsShell() {
this(null);
}
+ /**
+ * Construct a FsShell with the given configuration. Commands can be
+ * executed via {@link #run(String[])}
+ * @param conf the hadoop configuration
+ */
public FsShell(Configuration conf) {
super(conf);
- fs = null;
- trash = null;
- commandFactory = new CommandFactory();
}
protected FileSystem getFS() throws IOException {
- if(fs == null)
+ if (fs == null) {
fs = FileSystem.get(getConf());
-
+ }
return fs;
}
@@ -75,93 +81,145 @@ protected Trash getTrash() throws IOException {
protected void init() throws IOException {
getConf().setQuietMode(true);
+ if (commandFactory == null) {
+ commandFactory = new CommandFactory(getConf());
+ commandFactory.addObject(new Help(), "-help");
+ commandFactory.addObject(new Usage(), "-usage");
+ registerCommands(commandFactory);
+ }
}
+ protected void registerCommands(CommandFactory factory) {
+ // TODO: DFSAdmin subclasses FsShell so need to protect the command
+ // registration. This class should morph into a base class for
+ // commands, and then this method can be abstract
+ if (this.getClass().equals(FsShell.class)) {
+ factory.registerCommands(FsCommand.class);
+ }
+ }
+
/**
* Returns the Trash object associated with this shell.
+ * @return Path to the trash
+ * @throws IOException upon error
*/
public Path getCurrentTrashDir() throws IOException {
return getTrash().getCurrentTrashDir();
}
+ // NOTE: Usage/Help are inner classes to allow access to outer methods
+ // that access commandFactory
+
/**
- * Return an abbreviated English-language desc of the byte length
- * @deprecated Consider using {@link org.apache.hadoop.util.StringUtils#byteDesc} instead.
+ * Display help for commands with their short usage and long description
*/
- @Deprecated
- public static String byteDesc(long len) {
- return StringUtils.byteDesc(len);
- }
+ protected class Usage extends FsCommand {
+ public static final String NAME = "usage";
+ public static final String USAGE = "[cmd ...]";
+ public static final String DESCRIPTION =
+ "Displays the usage for given command or all commands if none\n" +
+ "is specified.";
+
+ @Override
+ protected void processRawArguments(LinkedList<String> args) {
+ if (args.isEmpty()) {
+ printUsage(System.out);
+ } else {
+ for (String arg : args) printUsage(System.out, arg);
+ }
+ }
+ }
/**
- * @deprecated Consider using {@link org.apache.hadoop.util.StringUtils#limitDecimalTo2} instead.
+ * Displays short usage of commands sans the long description
*/
- @Deprecated
- public static synchronized String limitDecimalTo2(double d) {
- return StringUtils.limitDecimalTo2(d);
+ protected class Help extends FsCommand {
+ public static final String NAME = "help";
+ public static final String USAGE = "[cmd ...]";
+ public static final String DESCRIPTION =
+ "Displays help for given command or all commands if none\n" +
+ "is specified.";
+
+ @Override
+ protected void processRawArguments(LinkedList<String> args) {
+ if (args.isEmpty()) {
+ printHelp(System.out);
+ } else {
+ for (String arg : args) printHelp(System.out, arg);
+ }
+ }
}
- private void printHelp(String cmd) {
- String summary = "hadoop fs is the command to execute fs commands. " +
- "The full syntax is: \n\n" +
- "hadoop fs [-fs <local | file system URI>] [-conf <configuration file>]\n\t" +
- "[-D <property=value>]\n\t" +
- "[-report]";
+ /*
+ * The following are helper methods for getInfo(). They are defined
+ * outside of the scope of the Help/Usage class because the run() method
+ * needs to invoke them too.
+ */
- String conf ="-conf <configuration file>: Specify an application configuration file.";
-
- String D = "-D <property=value>: Use value for given property.";
+ // print all usages
+ private void printUsage(PrintStream out) {
+ printInfo(out, null, false);
+ }
- String fs = "-fs [local | <file system URI>]: \tSpecify the file system to use.\n" +
- "\t\tIf not specified, the current configuration is used, \n" +
- "\t\ttaken from the following, in increasing precedence: \n" +
- "\t\t\tcore-default.xml inside the hadoop jar file \n" +
- "\t\t\tcore-site.xml in $HADOOP_CONF_DIR \n" +
- "\t\t'local' means use the local file system as your DFS. \n" +
- "\t\t<file system URI> specifies a particular file system to \n" +
- "\t\tcontact. This argument is optional but if used must appear\n" +
- "\t\tappear first on the command line. Exactly one additional\n" +
- "\t\targument must be specified. \n";
+ // print one usage
+ private void printUsage(PrintStream out, String cmd) {
+ printInfo(out, cmd, false);
+ }
- String help = "-help [cmd]: \tDisplays help for given command or all commands if none\n" +
- "\t\tis specified.\n";
+ // print all helps
+ private void printHelp(PrintStream out) {
+ printInfo(out, null, true);
+ }
- Command instance = commandFactory.getInstance("-" + cmd);
- if (instance != null) {
- printHelp(instance);
- } else if ("fs".equals(cmd)) {
- System.out.println(fs);
- } else if ("conf".equals(cmd)) {
- System.out.println(conf);
- } else if ("D".equals(cmd)) {
- System.out.println(D);
- } else if ("help".equals(cmd)) {
- System.out.println(help);
+ // print one help
+ private void printHelp(PrintStream out, String cmd) {
+ printInfo(out, cmd, true);
+ }
+
+ private void printInfo(PrintStream out, String cmd, boolean showHelp) {
+ if (cmd != null) {
+ // display help or usage for one command
+ Command instance = commandFactory.getInstance("-" + cmd);
+ if (instance == null) {
+ throw new UnknownCommandException(cmd);
+ }
+ if (showHelp) {
+ printInstanceHelp(out, instance);
+ } else {
+ printInstanceUsage(out, instance);
+ }
} else {
- System.out.println(summary);
- for (String thisCmdName : commandFactory.getNames()) {
- instance = commandFactory.getInstance(thisCmdName);
+ // display help or usage for all commands
+ out.println(usagePrefix);
+
+ // display list of short usages
+ ArrayList<Command> instances = new ArrayList<Command>();
+ for (String name : commandFactory.getNames()) {
+ Command instance = commandFactory.getInstance(name);
if (!instance.isDeprecated()) {
System.out.println("\t[" + instance.getUsage() + "]");
+ instances.add(instance);
}
}
- System.out.println("\t[-help [cmd]]\n");
-
- System.out.println(fs);
-
- for (String thisCmdName : commandFactory.getNames()) {
- instance = commandFactory.getInstance(thisCmdName);
- if (!instance.isDeprecated()) {
- printHelp(instance);
+ // display long descriptions for each command
+ if (showHelp) {
+ for (Command instance : instances) {
+ out.println();
+ printInstanceHelp(out, instance);
}
}
- System.out.println(help);
- }
+ out.println();
+ ToolRunner.printGenericCommandUsage(out);
+ }
+ }
+
+ private void printInstanceUsage(PrintStream out, Command instance) {
+ out.println(usagePrefix + " " + instance.getUsage());
}
// TODO: will eventually auto-wrap the text, but this matches the expected
// output for the hdfs tests...
- private void printHelp(Command instance) {
+ private void printInstanceHelp(PrintStream out, Command instance) {
boolean firstLine = true;
for (String line : instance.getDescription().split("\n")) {
String prefix;
@@ -174,120 +232,51 @@ private void printHelp(Command instance) {
System.out.println(prefix + line);
}
}
-
- /**
- * Displays format of commands.
- *
- */
- private void printUsage(String cmd) {
- String prefix = "Usage: java " + FsShell.class.getSimpleName();
-
- Command instance = commandFactory.getInstance(cmd);
- if (instance != null) {
- System.err.println(prefix + " [" + instance.getUsage() + "]");
- } else if ("-fs".equals(cmd)) {
- System.err.println("Usage: java FsShell" +
- " [-fs <local | file system URI>]");
- } else if ("-conf".equals(cmd)) {
- System.err.println("Usage: java FsShell" +
- " [-conf <configuration file>]");
- } else if ("-D".equals(cmd)) {
- System.err.println("Usage: java FsShell" +
- " [-D <[property=value>]");
- } else {
- System.err.println("Usage: java FsShell");
- for (String name : commandFactory.getNames()) {
- instance = commandFactory.getInstance(name);
- if (!instance.isDeprecated()) {
- System.err.println(" [" + instance.getUsage() + "]");
- }
- }
- System.err.println(" [-help [cmd]]");
- System.err.println();
- ToolRunner.printGenericCommandUsage(System.err);
- }
- }
/**
* run
*/
public int run(String argv[]) throws Exception {
- // TODO: This isn't the best place, but this class is being abused with
- // subclasses which of course override this method. There really needs
- // to be a better base class for all commands
- commandFactory.setConf(getConf());
- commandFactory.registerCommands(FsCommand.class);
-
- if (argv.length < 1) {
- printUsage("");
- return -1;
- }
-
- int exitCode = -1;
- int i = 0;
- String cmd = argv[i++];
// initialize FsShell
- try {
- init();
- } catch (RPC.VersionMismatch v) {
- LOG.debug("Version mismatch", v);
- System.err.println("Version Mismatch between client and server" +
- "... command aborted.");
- return exitCode;
- } catch (IOException e) {
- LOG.debug("Error", e);
- System.err.println("Bad connection to FS. Command aborted. Exception: " +
- e.getLocalizedMessage());
- return exitCode;
- }
+ init();
- try {
- Command instance = commandFactory.getInstance(cmd);
- if (instance != null) {
- exitCode = instance.run(Arrays.copyOfRange(argv, i, argv.length));
- } else if ("-help".equals(cmd)) {
- if (i < argv.length) {
- printHelp(argv[i]);
- } else {
- printHelp("");
+ int exitCode = -1;
+ if (argv.length < 1) {
+ printUsage(System.err);
+ } else {
+ String cmd = argv[0];
+ Command instance = null;
+ try {
+ instance = commandFactory.getInstance(cmd);
+ if (instance == null) {
+ throw new UnknownCommandException();
}
- } else {
- System.err.println(cmd + ": Unknown command");
- printUsage("");
- }
- } catch (Exception e) {
- exitCode = 1;
- LOG.debug("Error", e);
- displayError(cmd, e);
- if (e instanceof IllegalArgumentException) {
- exitCode = -1;
- printUsage(cmd);
+ exitCode = instance.run(Arrays.copyOfRange(argv, 1, argv.length));
+ } catch (IllegalArgumentException e) {
+ displayError(cmd, e.getLocalizedMessage());
+ if (instance != null) {
+ printInstanceUsage(System.err, instance);
+ }
+ } catch (Exception e) {
+ // instance.run catches IOE, so something is REALLY wrong if here
+ LOG.debug("Error", e);
+ displayError(cmd, "Fatal internal error");
+ e.printStackTrace(System.err);
}
}
return exitCode;
}
-
- // TODO: this is a quick workaround to accelerate the integration of
- // redesigned commands. this will be removed this once all commands are
- // converted. this change will avoid having to change the hdfs tests
- // every time a command is converted to use path-based exceptions
- private static Pattern[] fnfPatterns = {
- Pattern.compile("File (.*) does not exist\\."),
- Pattern.compile("File does not exist: (.*)"),
- Pattern.compile("`(.*)': specified destination directory doest not exist")
- };
- private void displayError(String cmd, Exception e) {
- String message = e.getLocalizedMessage().split("\n")[0];
- for (Pattern pattern : fnfPatterns) {
- Matcher matcher = pattern.matcher(message);
- if (matcher.matches()) {
- message = new PathNotFoundException(matcher.group(1)).getMessage();
- break;
- }
+
+ private void displayError(String cmd, String message) {
+ for (String line : message.split("\n")) {
+ System.err.println(cmd.substring(1) + ": " + line);
}
- System.err.println(cmd.substring(1) + ": " + message);
}
+ /**
+ * Performs any necessary cleanup
+ * @throws IOException upon error
+ */
public void close() throws IOException {
if (fs != null) {
fs.close();
@@ -297,9 +286,11 @@ public void close() throws IOException {
/**
* main() has some simple utility methods
+ * @param argv the command and its arguments
+ * @throws Exception upon error
*/
public static void main(String argv[]) throws Exception {
- FsShell shell = new FsShell();
+ FsShell shell = newShellInstance();
int res;
try {
res = ToolRunner.run(shell, argv);
@@ -308,4 +299,26 @@ public static void main(String argv[]) throws Exception {
}
System.exit(res);
}
+
+ // TODO: this should be abstract in a base class
+ protected static FsShell newShellInstance() {
+ return new FsShell();
+ }
+
+ /**
+ * The default ctor signals that the command being executed does not exist,
+ * while other ctor signals that a specific command does not exist. The
+ * latter is used by commands that process other commands, ex. -usage/-help
+ */
+ @SuppressWarnings("serial")
+ static class UnknownCommandException extends IllegalArgumentException {
+ private final String cmd;
+ UnknownCommandException() { this(null); }
+ UnknownCommandException(String cmd) { this.cmd = cmd; }
+
+ @Override
+ public String getMessage() {
+ return ((cmd != null) ? "`"+cmd+"': " : "") + "Unknown command";
+ }
+ }
}
diff --git a/src/java/org/apache/hadoop/fs/shell/Command.java b/src/java/org/apache/hadoop/fs/shell/Command.java
index f2507b92c2bce..f9efdfcfb09e1 100644
--- a/src/java/org/apache/hadoop/fs/shell/Command.java
+++ b/src/java/org/apache/hadoop/fs/shell/Command.java
@@ -42,6 +42,13 @@
@InterfaceStability.Evolving
abstract public class Command extends Configured {
+ /** default name of the command */
+ public static String NAME;
+ /** the command's usage switches and arguments format */
+ public static String USAGE;
+ /** the command's long description */
+ public static String DESCRIPTION;
+
protected String[] args;
protected String name;
protected int exitCode = 0;
@@ -70,14 +77,6 @@ protected Command(Configuration conf) {
/** @return the command's name excluding the leading character - */
abstract public String getCommandName();
- /**
- * Name the command
- * @param cmdName as invoked
- */
- public void setCommandName(String cmdName) {
- name = cmdName;
- }
-
protected void setRecursive(boolean flag) {
recursive = flag;
}
@@ -120,14 +119,16 @@ public int runAll() {
* expand arguments, and then process each argument.
* <pre>
* run
- * \-> {@link #processOptions(LinkedList)}
- * \-> {@link #expandArguments(LinkedList)} -> {@link #expandArgument(String)}*
- * \-> {@link #processArguments(LinkedList)}
- * \-> {@link #processArgument(PathData)}*
- * \-> {@link #processPathArgument(PathData)}
- * \-> {@link #processPaths(PathData, PathData...)}
- * \-> {@link #processPath(PathData)}*
- * \-> {@link #processNonexistentPath(PathData)}
+ * |-> {@link #processOptions(LinkedList)}
+ * \-> {@link #processRawArguments(LinkedList)}
+ * |-> {@link #expandArguments(LinkedList)}
+ * | \-> {@link #expandArgument(String)}*
+ * \-> {@link #processArguments(LinkedList)}
+ * |-> {@link #processArgument(PathData)}*
+ * | |-> {@link #processPathArgument(PathData)}
+ * | \-> {@link #processPaths(PathData, PathData...)}
+ * | \-> {@link #processPath(PathData)}*
+ * \-> {@link #processNonexistentPath(PathData)}
* </pre>
* Most commands will chose to implement just
* {@link #processOptions(LinkedList)} and {@link #processPath(PathData)}
@@ -144,7 +145,7 @@ public int run(String...argv) {
"DEPRECATED: Please use '"+ getReplacementCommand() + "' instead.");
}
processOptions(args);
- processArguments(expandArguments(args));
+ processRawArguments(args);
} catch (IOException e) {
displayError(e);
}
@@ -170,6 +171,19 @@ public int run(String...argv) {
*/
protected void processOptions(LinkedList<String> args) throws IOException {}
+ /**
+ * Allows commands that don't use paths to handle the raw arguments.
+ * Default behavior is to expand the arguments via
+ * {@link #expandArguments(LinkedList)} and pass the resulting list to
+ * {@link #processArguments(LinkedList)}
+ * @param args the list of argument strings
+ * @throws IOException
+ */
+ protected void processRawArguments(LinkedList<String> args)
+ throws IOException {
+ processArguments(expandArguments(args));
+ }
+
/**
* Expands a list of arguments into {@link PathData} objects. The default
* behavior is to call {@link #expandArgument(String)} on each element
@@ -353,7 +367,26 @@ public void displayError(String message) {
* @param message warning message to display
*/
public void displayWarning(String message) {
- err.println(getCommandName() + ": " + message);
+ err.println(getName() + ": " + message);
+ }
+
+ /**
+ * The name of the command. Will first try to use the assigned name
+ * else fallback to the command's preferred name
+ * @return name of the command
+ */
+ public String getName() {
+ return (name == null)
+ ? getCommandField("NAME")
+ : name.startsWith("-") ? name.substring(1) : name; // this is a historical method
+ }
+
+ /**
+ * Define the name of the command.
+ * @param name as invoked
+ */
+ public void setName(String name) {
+ this.name = name;
}
/**
@@ -361,7 +394,7 @@ public void displayWarning(String message) {
* @return "name options"
*/
public String getUsage() {
- String cmd = "-" + getCommandName();
+ String cmd = "-" + getName();
String usage = isDeprecated() ? "" : getCommandField("USAGE");
return usage.isEmpty() ? cmd : cmd + " " + usage;
}
@@ -400,9 +433,10 @@ public String getReplacementCommand() {
private String getCommandField(String field) {
String value;
try {
- value = (String)this.getClass().getField(field).get(null);
+ value = this.getClass().getField(field).get(this).toString();
} catch (Exception e) {
- throw new RuntimeException(StringUtils.stringifyException(e));
+ throw new RuntimeException(
+ "failed to get " + this.getClass().getSimpleName()+"."+field, e);
}
return value;
}
diff --git a/src/java/org/apache/hadoop/fs/shell/CommandFactory.java b/src/java/org/apache/hadoop/fs/shell/CommandFactory.java
index c8425145cc0df..f5d9d5a801d9f 100644
--- a/src/java/org/apache/hadoop/fs/shell/CommandFactory.java
+++ b/src/java/org/apache/hadoop/fs/shell/CommandFactory.java
@@ -19,7 +19,8 @@
package org.apache.hadoop.fs.shell;
import java.util.Arrays;
-import java.util.Hashtable;
+import java.util.HashMap;
+import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -35,8 +36,11 @@
@InterfaceStability.Unstable
public class CommandFactory extends Configured implements Configurable {
- private Hashtable<String, Class<? extends Command>> classMap =
- new Hashtable<String, Class<? extends Command>>();
+ private Map<String, Class<? extends Command>> classMap =
+ new HashMap<String, Class<? extends Command>>();
+
+ private Map<String, Command> objectMap =
+ new HashMap<String, Command>();
/** Factory constructor for commands */
public CommandFactory() {
@@ -79,16 +83,22 @@ public void addClass(Class<? extends Command> cmdClass, String ... names) {
}
/**
- * Returns the class implementing the given command. The
- * class must have been registered via
- * {@link #addClass(Class, String...)}
- * @param cmd name of the command
- * @return instance of the requested command
+ * Register the given object as handling the given list of command
+ * names. Avoid calling this method and use
+ * {@link #addClass(Class, String...)} whenever possible to avoid
+ * startup overhead from excessive command object instantiations. This
+ * method is intended only for handling nested non-static classes that
+ * are re-usable. Namely -help/-usage.
+ * @param cmdObject the object implementing the command names
+ * @param names one or more command names that will invoke this class
*/
- protected Class<? extends Command> getClass(String cmd) {
- return classMap.get(cmd);
+ public void addObject(Command cmdObject, String ... names) {
+ for (String name : names) {
+ objectMap.put(name, cmdObject);
+ classMap.put(name, null); // just so it shows up in the list of commands
+ }
}
-
+
/**
* Returns an instance of the class implementing the given command. The
* class must have been registered via
@@ -109,11 +119,13 @@ public Command getInstance(String cmd) {
public Command getInstance(String cmdName, Configuration conf) {
if (conf == null) throw new NullPointerException("configuration is null");
- Command instance = null;
- Class<? extends Command> cmdClass = getClass(cmdName);
- if (cmdClass != null) {
- instance = ReflectionUtils.newInstance(cmdClass, conf);
- instance.setCommandName(cmdName);
+ Command instance = objectMap.get(cmdName);
+ if (instance == null) {
+ Class<? extends Command> cmdClass = classMap.get(cmdName);
+ if (cmdClass != null) {
+ instance = ReflectionUtils.newInstance(cmdClass, conf);
+ instance.setName(cmdName);
+ }
}
return instance;
}
diff --git a/src/java/org/apache/hadoop/fs/shell/Count.java b/src/java/org/apache/hadoop/fs/shell/Count.java
index 891e68a4bf928..219973f25dfc3 100644
--- a/src/java/org/apache/hadoop/fs/shell/Count.java
+++ b/src/java/org/apache/hadoop/fs/shell/Count.java
@@ -54,9 +54,7 @@ public static void registerCommands(CommandFactory factory) {
private boolean showQuotas;
/** Constructor */
- public Count() {
- setCommandName(NAME);
- }
+ public Count() {}
/** Constructor
* @deprecated invoke via {@link FsShell}
@@ -67,7 +65,6 @@ public Count() {
@Deprecated
public Count(String[] cmd, int pos, Configuration conf) {
super(conf);
- setCommandName(NAME);
this.args = Arrays.copyOfRange(cmd, pos, cmd.length);
}
diff --git a/src/java/org/apache/hadoop/fs/shell/Display.java b/src/java/org/apache/hadoop/fs/shell/Display.java
index e650b711bc108..8a05a55310eb2 100644
--- a/src/java/org/apache/hadoop/fs/shell/Display.java
+++ b/src/java/org/apache/hadoop/fs/shell/Display.java
@@ -100,7 +100,7 @@ protected InputStream getInputStream(PathData item) throws IOException {
*/
public static class Text extends Cat {
public static final String NAME = "text";
- public static final String SHORT_USAGE = Cat.USAGE;
+ public static final String USAGE = Cat.USAGE;
public static final String DESCRIPTION =
"Takes a source file and outputs the file in text format.\n" +
"The allowed formats are zip and TextRecordInputStream.";
diff --git a/src/java/org/apache/hadoop/fs/shell/FsCommand.java b/src/java/org/apache/hadoop/fs/shell/FsCommand.java
index 32bec37182b9b..3f397327de3e6 100644
--- a/src/java/org/apache/hadoop/fs/shell/FsCommand.java
+++ b/src/java/org/apache/hadoop/fs/shell/FsCommand.java
@@ -65,8 +65,10 @@ protected FsCommand(Configuration conf) {
super(conf);
}
- public String getCommandName() {
- return name.startsWith("-") ? name.substring(1) : name;
+ // historical abstract method in Command
+ @Override
+ public String getCommandName() {
+ return getName();
}
// abstract method that normally is invoked by runall() which is
diff --git a/src/test/core/org/apache/hadoop/cli/testConf.xml b/src/test/core/org/apache/hadoop/cli/testConf.xml
index 7567719bde963..d8b5785682cba 100644
--- a/src/test/core/org/apache/hadoop/cli/testConf.xml
+++ b/src/test/core/org/apache/hadoop/cli/testConf.xml
@@ -39,7 +39,7 @@
<comparators>
<comparator>
<type>SubstringComparator</type>
- <expected-output>hadoop fs is the command to execute fs commands. The full syntax is</expected-output>
+ <expected-output>Usage: hadoop fs [generic options]</expected-output>
</comparator>
</comparators>
</test>
@@ -730,7 +730,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^-help \[cmd\]:( |\t)*Displays help for given command or all commands if none( )*</expected-output>
+ <expected-output>^-help \[cmd ...\]:( |\t)*Displays help for given command or all commands if none( )*</expected-output>
</comparator>
<comparator>
<type>RegexpComparator</type>
|
71ab4ddb1907323ff661d6f1c0a87678f95a7571
|
hbase
|
HBASE-12731 Heap occupancy based client pushback--
|
a
|
https://github.com/apache/hbase
|
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
index 6e75670227eb..5b1d3d273d52 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
@@ -20,10 +20,13 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
+import com.google.common.base.Preconditions;
+
/**
* Simple exponential backoff policy on for the client that uses a percent^4 times the
* max backoff to generate the backoff time.
@@ -38,9 +41,15 @@ public class ExponentialClientBackoffPolicy implements ClientBackoffPolicy {
public static final long DEFAULT_MAX_BACKOFF = 5 * ONE_MINUTE;
public static final String MAX_BACKOFF_KEY = "hbase.client.exponential-backoff.max";
private long maxBackoff;
+ private float heapOccupancyLowWatermark;
+ private float heapOccupancyHighWatermark;
public ExponentialClientBackoffPolicy(Configuration conf) {
this.maxBackoff = conf.getLong(MAX_BACKOFF_KEY, DEFAULT_MAX_BACKOFF);
+ this.heapOccupancyLowWatermark = conf.getFloat(HConstants.HEAP_OCCUPANCY_LOW_WATERMARK_KEY,
+ HConstants.DEFAULT_HEAP_OCCUPANCY_LOW_WATERMARK);
+ this.heapOccupancyHighWatermark = conf.getFloat(HConstants.HEAP_OCCUPANCY_HIGH_WATERMARK_KEY,
+ HConstants.DEFAULT_HEAP_OCCUPANCY_HIGH_WATERMARK);
}
@Override
@@ -56,16 +65,40 @@ public long getBackoffTime(ServerName serverName, byte[] region, ServerStatistic
return 0;
}
+ // Factor in memstore load
+ double percent = regionStats.getMemstoreLoadPercent() / 100.0;
+
+ // Factor in heap occupancy
+ float heapOccupancy = regionStats.getHeapOccupancyPercent() / 100.0f;
+ if (heapOccupancy >= heapOccupancyLowWatermark) {
+ // If we are higher than the high watermark, we are already applying max
+ // backoff and cannot scale more (see scale() below)
+ if (heapOccupancy > heapOccupancyHighWatermark) {
+ heapOccupancy = heapOccupancyHighWatermark;
+ }
+ percent = Math.max(percent,
+ scale(heapOccupancy, heapOccupancyLowWatermark, heapOccupancyHighWatermark,
+ 0.1, 1.0));
+ }
+
// square the percent as a value less than 1. Closer we move to 100 percent,
// the percent moves to 1, but squaring causes the exponential curve
- double percent = regionStats.getMemstoreLoadPercent() / 100.0;
double multiplier = Math.pow(percent, 4.0);
- // shouldn't ever happen, but just incase something changes in the statistic data
if (multiplier > 1) {
- LOG.warn("Somehow got a backoff multiplier greater than the allowed backoff. Forcing back " +
- "down to the max backoff");
multiplier = 1;
}
return (long) (multiplier * maxBackoff);
}
+
+ /** Scale valueIn in the range [baseMin,baseMax] to the range [limitMin,limitMax] */
+ private static double scale(double valueIn, double baseMin, double baseMax, double limitMin,
+ double limitMax) {
+ Preconditions.checkArgument(baseMin <= baseMax, "Illegal source range [%s,%s]",
+ baseMin, baseMax);
+ Preconditions.checkArgument(limitMin <= limitMax, "Illegal target range [%s,%s]",
+ limitMin, limitMax);
+ Preconditions.checkArgument(valueIn >= baseMin && valueIn <= baseMax,
+ "Value %s must be within the range [%s,%s]", valueIn, baseMin, baseMax);
+ return ((limitMax - limitMin) * (valueIn - baseMin) / (baseMax - baseMin)) + limitMin;
+ }
}
\ No newline at end of file
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ServerStatistics.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ServerStatistics.java
index a3b8e11a6328..c7519be09198 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ServerStatistics.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ServerStatistics.java
@@ -54,15 +54,21 @@ public RegionStatistics getStatsForRegion(byte[] regionName){
return stats.get(regionName);
}
- public static class RegionStatistics{
+ public static class RegionStatistics {
private int memstoreLoad = 0;
+ private int heapOccupancy = 0;
public void update(ClientProtos.RegionLoadStats currentStats) {
this.memstoreLoad = currentStats.getMemstoreLoad();
+ this.heapOccupancy = currentStats.getHeapOccupancy();
}
public int getMemstoreLoadPercent(){
return this.memstoreLoad;
}
+
+ public int getHeapOccupancyPercent(){
+ return this.heapOccupancy;
+ }
}
}
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
index 88e409d5bafb..3a902d01d3ea 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.backoff.ExponentialClientBackoffPolicy;
import org.apache.hadoop.hbase.client.backoff.ServerStatistics;
@@ -101,10 +102,42 @@ public void testResultOrdering() {
}
}
+ @Test
+ public void testHeapOccupancyPolicy() {
+ Configuration conf = new Configuration(false);
+ ExponentialClientBackoffPolicy backoff = new ExponentialClientBackoffPolicy(conf);
+
+ ServerStatistics stats = new ServerStatistics();
+ long backoffTime;
+
+ update(stats, 0, 95);
+ backoffTime = backoff.getBackoffTime(server, regionname, stats);
+ assertTrue("Heap occupancy at low watermark had no effect", backoffTime > 0);
+
+ long previous = backoffTime;
+ update(stats, 0, 96);
+ backoffTime = backoff.getBackoffTime(server, regionname, stats);
+ assertTrue("Increase above low watermark should have increased backoff",
+ backoffTime > previous);
+
+ update(stats, 0, 98);
+ backoffTime = backoff.getBackoffTime(server, regionname, stats);
+ assertEquals("We should be using max backoff when at high watermark", backoffTime,
+ ExponentialClientBackoffPolicy.DEFAULT_MAX_BACKOFF);
+ }
+
private void update(ServerStatistics stats, int load) {
ClientProtos.RegionLoadStats stat = ClientProtos.RegionLoadStats.newBuilder()
.setMemstoreLoad
(load).build();
stats.update(regionname, stat);
}
+
+ private void update(ServerStatistics stats, int memstoreLoad, int heapOccupancy) {
+ ClientProtos.RegionLoadStats stat = ClientProtos.RegionLoadStats.newBuilder()
+ .setMemstoreLoad(memstoreLoad)
+ .setHeapOccupancy(heapOccupancy)
+ .build();
+ stats.update(regionname, stat);
+ }
}
\ No newline at end of file
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 8585299d6a8d..e84f78aa0007 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -1128,6 +1128,12 @@ public static enum Modify {
public static final String ENABLE_CLIENT_BACKPRESSURE = "hbase.client.backpressure.enabled";
public static final boolean DEFAULT_ENABLE_CLIENT_BACKPRESSURE = false;
+ public static final String HEAP_OCCUPANCY_LOW_WATERMARK_KEY =
+ "hbase.heap.occupancy.low_water_mark";
+ public static final float DEFAULT_HEAP_OCCUPANCY_LOW_WATERMARK = 0.95f;
+ public static final String HEAP_OCCUPANCY_HIGH_WATERMARK_KEY =
+ "hbase.heap.occupancy.high_water_mark";
+ public static final float DEFAULT_HEAP_OCCUPANCY_HIGH_WATERMARK = 0.98f;
private HConstants() {
// Can't be instantiated with this ctor.
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
index ab86e1e269c7..afd67a1cc530 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
@@ -26218,7 +26218,7 @@ public interface RegionLoadStatsOrBuilder
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
- * percent load on the memstore. Guaranteed to be positive, between 0 and 100
+ * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
boolean hasMemstoreLoad();
@@ -26226,10 +26226,30 @@ public interface RegionLoadStatsOrBuilder
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
- * percent load on the memstore. Guaranteed to be positive, between 0 and 100
+ * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
int getMemstoreLoad();
+
+ // optional int32 heapOccupancy = 2 [default = 0];
+ /**
+ * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
+ *
+ * <pre>
+ * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
+ * We can move this to "ServerLoadStats" should we develop them.
+ * </pre>
+ */
+ boolean hasHeapOccupancy();
+ /**
+ * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
+ *
+ * <pre>
+ * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
+ * We can move this to "ServerLoadStats" should we develop them.
+ * </pre>
+ */
+ int getHeapOccupancy();
}
/**
* Protobuf type {@code RegionLoadStats}
@@ -26292,6 +26312,11 @@ private RegionLoadStats(
memstoreLoad_ = input.readInt32();
break;
}
+ case 16: {
+ bitField0_ |= 0x00000002;
+ heapOccupancy_ = input.readInt32();
+ break;
+ }
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -26339,7 +26364,7 @@ public com.google.protobuf.Parser<RegionLoadStats> getParserForType() {
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
- * percent load on the memstore. Guaranteed to be positive, between 0 and 100
+ * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public boolean hasMemstoreLoad() {
@@ -26349,15 +26374,42 @@ public boolean hasMemstoreLoad() {
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
- * percent load on the memstore. Guaranteed to be positive, between 0 and 100
+ * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public int getMemstoreLoad() {
return memstoreLoad_;
}
+ // optional int32 heapOccupancy = 2 [default = 0];
+ public static final int HEAPOCCUPANCY_FIELD_NUMBER = 2;
+ private int heapOccupancy_;
+ /**
+ * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
+ *
+ * <pre>
+ * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
+ * We can move this to "ServerLoadStats" should we develop them.
+ * </pre>
+ */
+ public boolean hasHeapOccupancy() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
+ *
+ * <pre>
+ * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
+ * We can move this to "ServerLoadStats" should we develop them.
+ * </pre>
+ */
+ public int getHeapOccupancy() {
+ return heapOccupancy_;
+ }
+
private void initFields() {
memstoreLoad_ = 0;
+ heapOccupancy_ = 0;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -26374,6 +26426,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt32(1, memstoreLoad_);
}
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeInt32(2, heapOccupancy_);
+ }
getUnknownFields().writeTo(output);
}
@@ -26387,6 +26442,10 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(1, memstoreLoad_);
}
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(2, heapOccupancy_);
+ }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
@@ -26415,6 +26474,11 @@ public boolean equals(final java.lang.Object obj) {
result = result && (getMemstoreLoad()
== other.getMemstoreLoad());
}
+ result = result && (hasHeapOccupancy() == other.hasHeapOccupancy());
+ if (hasHeapOccupancy()) {
+ result = result && (getHeapOccupancy()
+ == other.getHeapOccupancy());
+ }
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
@@ -26432,6 +26496,10 @@ public int hashCode() {
hash = (37 * hash) + MEMSTORELOAD_FIELD_NUMBER;
hash = (53 * hash) + getMemstoreLoad();
}
+ if (hasHeapOccupancy()) {
+ hash = (37 * hash) + HEAPOCCUPANCY_FIELD_NUMBER;
+ hash = (53 * hash) + getHeapOccupancy();
+ }
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
@@ -26548,6 +26616,8 @@ public Builder clear() {
super.clear();
memstoreLoad_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
+ heapOccupancy_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@@ -26580,6 +26650,10 @@ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats b
to_bitField0_ |= 0x00000001;
}
result.memstoreLoad_ = memstoreLoad_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.heapOccupancy_ = heapOccupancy_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
@@ -26599,6 +26673,9 @@ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos
if (other.hasMemstoreLoad()) {
setMemstoreLoad(other.getMemstoreLoad());
}
+ if (other.hasHeapOccupancy()) {
+ setHeapOccupancy(other.getHeapOccupancy());
+ }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
@@ -26632,7 +26709,7 @@ public Builder mergeFrom(
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
- * percent load on the memstore. Guaranteed to be positive, between 0 and 100
+ * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public boolean hasMemstoreLoad() {
@@ -26642,7 +26719,7 @@ public boolean hasMemstoreLoad() {
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
- * percent load on the memstore. Guaranteed to be positive, between 0 and 100
+ * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public int getMemstoreLoad() {
@@ -26652,7 +26729,7 @@ public int getMemstoreLoad() {
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
- * percent load on the memstore. Guaranteed to be positive, between 0 and 100
+ * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public Builder setMemstoreLoad(int value) {
@@ -26665,7 +26742,7 @@ public Builder setMemstoreLoad(int value) {
* <code>optional int32 memstoreLoad = 1 [default = 0];</code>
*
* <pre>
- * percent load on the memstore. Guaranteed to be positive, between 0 and 100
+ * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
* </pre>
*/
public Builder clearMemstoreLoad() {
@@ -26675,6 +26752,59 @@ public Builder clearMemstoreLoad() {
return this;
}
+ // optional int32 heapOccupancy = 2 [default = 0];
+ private int heapOccupancy_ ;
+ /**
+ * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
+ *
+ * <pre>
+ * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
+ * We can move this to "ServerLoadStats" should we develop them.
+ * </pre>
+ */
+ public boolean hasHeapOccupancy() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
+ *
+ * <pre>
+ * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
+ * We can move this to "ServerLoadStats" should we develop them.
+ * </pre>
+ */
+ public int getHeapOccupancy() {
+ return heapOccupancy_;
+ }
+ /**
+ * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
+ *
+ * <pre>
+ * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
+ * We can move this to "ServerLoadStats" should we develop them.
+ * </pre>
+ */
+ public Builder setHeapOccupancy(int value) {
+ bitField0_ |= 0x00000002;
+ heapOccupancy_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
+ *
+ * <pre>
+ * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
+ * We can move this to "ServerLoadStats" should we develop them.
+ * </pre>
+ */
+ public Builder clearHeapOccupancy() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ heapOccupancy_ = 0;
+ onChanged();
+ return this;
+ }
+
// @@protoc_insertion_point(builder_scope:RegionLoadStats)
}
@@ -31922,33 +32052,33 @@ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse mul
"\030\003 \001(\0132\004.Get\022-\n\014service_call\030\004 \001(\0132\027.Cop" +
"rocessorServiceCall\"Y\n\014RegionAction\022 \n\006r" +
"egion\030\001 \002(\0132\020.RegionSpecifier\022\016\n\006atomic\030" +
- "\002 \001(\010\022\027\n\006action\030\003 \003(\0132\007.Action\"*\n\017Region" +
- "LoadStats\022\027\n\014memstoreLoad\030\001 \001(\005:\0010\"\266\001\n\021R" +
- "esultOrException\022\r\n\005index\030\001 \001(\r\022\027\n\006resul" +
- "t\030\002 \001(\0132\007.Result\022!\n\texception\030\003 \001(\0132\016.Na" +
- "meBytesPair\0221\n\016service_result\030\004 \001(\0132\031.Co",
- "processorServiceResult\022#\n\tloadStats\030\005 \001(" +
- "\0132\020.RegionLoadStats\"f\n\022RegionActionResul" +
- "t\022-\n\021resultOrException\030\001 \003(\0132\022.ResultOrE" +
- "xception\022!\n\texception\030\002 \001(\0132\016.NameBytesP" +
- "air\"f\n\014MultiRequest\022#\n\014regionAction\030\001 \003(" +
- "\0132\r.RegionAction\022\022\n\nnonceGroup\030\002 \001(\004\022\035\n\t" +
- "condition\030\003 \001(\0132\n.Condition\"S\n\rMultiResp" +
- "onse\022/\n\022regionActionResult\030\001 \003(\0132\023.Regio" +
- "nActionResult\022\021\n\tprocessed\030\002 \001(\010*\'\n\013Cons" +
- "istency\022\n\n\006STRONG\020\000\022\014\n\010TIMELINE\020\0012\205\003\n\rCl",
- "ientService\022 \n\003Get\022\013.GetRequest\032\014.GetRes" +
- "ponse\022)\n\006Mutate\022\016.MutateRequest\032\017.Mutate" +
- "Response\022#\n\004Scan\022\014.ScanRequest\032\r.ScanRes" +
- "ponse\022>\n\rBulkLoadHFile\022\025.BulkLoadHFileRe" +
- "quest\032\026.BulkLoadHFileResponse\022F\n\013ExecSer" +
- "vice\022\032.CoprocessorServiceRequest\032\033.Copro" +
- "cessorServiceResponse\022R\n\027ExecRegionServe" +
- "rService\022\032.CoprocessorServiceRequest\032\033.C" +
- "oprocessorServiceResponse\022&\n\005Multi\022\r.Mul" +
- "tiRequest\032\016.MultiResponseBB\n*org.apache.",
- "hadoop.hbase.protobuf.generatedB\014ClientP" +
- "rotosH\001\210\001\001\240\001\001"
+ "\002 \001(\010\022\027\n\006action\030\003 \003(\0132\007.Action\"D\n\017Region" +
+ "LoadStats\022\027\n\014memstoreLoad\030\001 \001(\005:\0010\022\030\n\rhe" +
+ "apOccupancy\030\002 \001(\005:\0010\"\266\001\n\021ResultOrExcepti" +
+ "on\022\r\n\005index\030\001 \001(\r\022\027\n\006result\030\002 \001(\0132\007.Resu" +
+ "lt\022!\n\texception\030\003 \001(\0132\016.NameBytesPair\0221\n",
+ "\016service_result\030\004 \001(\0132\031.CoprocessorServi" +
+ "ceResult\022#\n\tloadStats\030\005 \001(\0132\020.RegionLoad" +
+ "Stats\"f\n\022RegionActionResult\022-\n\021resultOrE" +
+ "xception\030\001 \003(\0132\022.ResultOrException\022!\n\tex" +
+ "ception\030\002 \001(\0132\016.NameBytesPair\"f\n\014MultiRe" +
+ "quest\022#\n\014regionAction\030\001 \003(\0132\r.RegionActi" +
+ "on\022\022\n\nnonceGroup\030\002 \001(\004\022\035\n\tcondition\030\003 \001(" +
+ "\0132\n.Condition\"S\n\rMultiResponse\022/\n\022region" +
+ "ActionResult\030\001 \003(\0132\023.RegionActionResult\022" +
+ "\021\n\tprocessed\030\002 \001(\010*\'\n\013Consistency\022\n\n\006STR",
+ "ONG\020\000\022\014\n\010TIMELINE\020\0012\205\003\n\rClientService\022 \n" +
+ "\003Get\022\013.GetRequest\032\014.GetResponse\022)\n\006Mutat" +
+ "e\022\016.MutateRequest\032\017.MutateResponse\022#\n\004Sc" +
+ "an\022\014.ScanRequest\032\r.ScanResponse\022>\n\rBulkL" +
+ "oadHFile\022\025.BulkLoadHFileRequest\032\026.BulkLo" +
+ "adHFileResponse\022F\n\013ExecService\022\032.Coproce" +
+ "ssorServiceRequest\032\033.CoprocessorServiceR" +
+ "esponse\022R\n\027ExecRegionServerService\022\032.Cop" +
+ "rocessorServiceRequest\032\033.CoprocessorServ" +
+ "iceResponse\022&\n\005Multi\022\r.MultiRequest\032\016.Mu",
+ "ltiResponseBB\n*org.apache.hadoop.hbase.p" +
+ "rotobuf.generatedB\014ClientProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -32110,7 +32240,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors(
internal_static_RegionLoadStats_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_RegionLoadStats_descriptor,
- new java.lang.String[] { "MemstoreLoad", });
+ new java.lang.String[] { "MemstoreLoad", "HeapOccupancy", });
internal_static_ResultOrException_descriptor =
getDescriptor().getMessageTypes().get(23);
internal_static_ResultOrException_fieldAccessorTable = new
diff --git a/hbase-protocol/src/main/protobuf/Client.proto b/hbase-protocol/src/main/protobuf/Client.proto
index 1a3c43e4a0fb..606ca8df1310 100644
--- a/hbase-protocol/src/main/protobuf/Client.proto
+++ b/hbase-protocol/src/main/protobuf/Client.proto
@@ -356,9 +356,12 @@ message RegionAction {
/*
* Statistics about the current load on the region
*/
-message RegionLoadStats{
- // percent load on the memstore. Guaranteed to be positive, between 0 and 100
+message RegionLoadStats {
+ // Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
optional int32 memstoreLoad = 1 [default = 0];
+ // Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
+ // We can move this to "ServerLoadStats" should we develop them.
+ optional int32 heapOccupancy = 2 [default = 0];
}
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index dd1cf8d78987..2b6f9743c0fb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -5244,6 +5244,7 @@ public ClientProtos.RegionLoadStats getRegionStats() {
ClientProtos.RegionLoadStats.Builder stats = ClientProtos.RegionLoadStats.newBuilder();
stats.setMemstoreLoad((int) (Math.min(100, (this.memstoreSize.get() * 100) / this
.memstoreFlushSize)));
+ stats.setHeapOccupancy((int)rsServices.getHeapMemoryManager().getHeapOccupancyPercent()*100);
return stats.build();
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 4669f8f59534..5263a99f4fe2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -3140,4 +3140,9 @@ public void updateConfiguration() {
conf.reloadConfiguration();
configurationManager.notifyAllObservers(conf);
}
+
+ @Override
+ public HeapMemoryManager getHeapMemoryManager() {
+ return hMemManager;
+ }
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java
index ddd3e95b0267..112634e9f28b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java
@@ -21,6 +21,7 @@
import static org.apache.hadoop.hbase.HConstants.HFILE_BLOCK_CACHE_SIZE_KEY;
import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryUsage;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log;
@@ -57,7 +58,7 @@ public class HeapMemoryManager {
"hbase.regionserver.global.memstore.size.min.range";
public static final String HBASE_RS_HEAP_MEMORY_TUNER_PERIOD =
"hbase.regionserver.heapmemory.tuner.period";
- public static final int HBASE_RS_HEAP_MEMORY_TUNER_DEFAULT_PERIOD = 5 * 60 * 1000;
+ public static final int HBASE_RS_HEAP_MEMORY_TUNER_DEFAULT_PERIOD = 60 * 1000;
public static final String HBASE_RS_HEAP_MEMORY_TUNER_CLASS =
"hbase.regionserver.heapmemory.tuner.class";
@@ -70,12 +71,16 @@ public class HeapMemoryManager {
private float blockCachePercentMaxRange;
private float l2BlockCachePercent;
+ private float heapOccupancyPercent;
+
private final ResizableBlockCache blockCache;
private final FlushRequester memStoreFlusher;
private final Server server;
private HeapMemoryTunerChore heapMemTunerChore = null;
private final boolean tunerOn;
+ private final int defaultChorePeriod;
+ private final float heapOccupancyLowWatermark;
private long maxHeapSize = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax();
@@ -91,10 +96,15 @@ public static HeapMemoryManager create(Configuration conf, FlushRequester memSto
@VisibleForTesting
HeapMemoryManager(ResizableBlockCache blockCache, FlushRequester memStoreFlusher,
Server server) {
+ Configuration conf = server.getConfiguration();
this.blockCache = blockCache;
this.memStoreFlusher = memStoreFlusher;
this.server = server;
- this.tunerOn = doInit(server.getConfiguration());
+ this.tunerOn = doInit(conf);
+ this.defaultChorePeriod = conf.getInt(HBASE_RS_HEAP_MEMORY_TUNER_PERIOD,
+ HBASE_RS_HEAP_MEMORY_TUNER_DEFAULT_PERIOD);
+ this.heapOccupancyLowWatermark = conf.getFloat(HConstants.HEAP_OCCUPANCY_LOW_WATERMARK_KEY,
+ HConstants.DEFAULT_HEAP_OCCUPANCY_LOW_WATERMARK);
}
private boolean doInit(Configuration conf) {
@@ -174,10 +184,10 @@ private boolean doInit(Configuration conf) {
}
public void start() {
+ LOG.info("Starting HeapMemoryTuner chore.");
+ this.heapMemTunerChore = new HeapMemoryTunerChore();
+ Threads.setDaemonThreadRunning(heapMemTunerChore.getThread());
if (tunerOn) {
- LOG.info("Starting HeapMemoryTuner chore.");
- this.heapMemTunerChore = new HeapMemoryTunerChore();
- Threads.setDaemonThreadRunning(heapMemTunerChore.getThread());
// Register HeapMemoryTuner as a memstore flush listener
memStoreFlusher.registerFlushRequestListener(heapMemTunerChore);
}
@@ -185,10 +195,8 @@ public void start() {
public void stop() {
// The thread is Daemon. Just interrupting the ongoing process.
- if (tunerOn) {
- LOG.info("Stoping HeapMemoryTuner chore.");
- this.heapMemTunerChore.interrupt();
- }
+ LOG.info("Stoping HeapMemoryTuner chore.");
+ this.heapMemTunerChore.interrupt();
}
// Used by the test cases.
@@ -196,16 +204,23 @@ boolean isTunerOn() {
return this.tunerOn;
}
+ /**
+ * @return heap occupancy percentage, 0 <= n <= 1
+ */
+ public float getHeapOccupancyPercent() {
+ return this.heapOccupancyPercent;
+ }
+
private class HeapMemoryTunerChore extends Chore implements FlushRequestListener {
private HeapMemoryTuner heapMemTuner;
private AtomicLong blockedFlushCount = new AtomicLong();
private AtomicLong unblockedFlushCount = new AtomicLong();
private long evictCount = 0L;
private TunerContext tunerContext = new TunerContext();
+ private boolean alarming = false;
public HeapMemoryTunerChore() {
- super(server.getServerName() + "-HeapMemoryTunerChore", server.getConfiguration().getInt(
- HBASE_RS_HEAP_MEMORY_TUNER_PERIOD, HBASE_RS_HEAP_MEMORY_TUNER_DEFAULT_PERIOD), server);
+ super(server.getServerName() + "-HeapMemoryTunerChore", defaultChorePeriod, server);
Class<? extends HeapMemoryTuner> tunerKlass = server.getConfiguration().getClass(
HBASE_RS_HEAP_MEMORY_TUNER_CLASS, DefaultHeapMemoryTuner.class, HeapMemoryTuner.class);
heapMemTuner = ReflectionUtils.newInstance(tunerKlass, server.getConfiguration());
@@ -213,6 +228,41 @@ public HeapMemoryTunerChore() {
@Override
protected void chore() {
+ // Sample heap occupancy
+ MemoryUsage memUsage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
+ heapOccupancyPercent = (float)memUsage.getUsed() / (float)memUsage.getCommitted();
+ // If we are above the heap occupancy alarm low watermark, switch to short
+ // sleeps for close monitoring. Stop autotuning, we are in a danger zone.
+ if (heapOccupancyPercent >= heapOccupancyLowWatermark) {
+ if (!alarming) {
+ LOG.warn("heapOccupancyPercent " + heapOccupancyPercent +
+ " is above heap occupancy alarm watermark (" + heapOccupancyLowWatermark + ")");
+ alarming = true;
+ }
+ getSleeper().skipSleepCycle();
+ try {
+ // Need to sleep ourselves since we've told the chore's sleeper
+ // to skip the next sleep cycle.
+ Thread.sleep(1000);
+ } catch (InterruptedException e) {
+ // Interrupted, propagate
+ Thread.currentThread().interrupt();
+ }
+ } else {
+ if (alarming) {
+ LOG.info("heapOccupancyPercent " + heapOccupancyPercent +
+ " is now below the heap occupancy alarm watermark (" +
+ heapOccupancyLowWatermark + ")");
+ alarming = false;
+ }
+ }
+ // Autotune if tuning is enabled and allowed
+ if (tunerOn && !alarming) {
+ tune();
+ }
+ }
+
+ private void tune() {
evictCount = blockCache.getStats().getEvictedCount() - evictCount;
tunerContext.setBlockedFlushCount(blockedFlushCount.getAndSet(0));
tunerContext.setUnblockedFlushCount(unblockedFlushCount.getAndSet(0));
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java
index 08d038c8669e..3565195f8267 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java
@@ -149,4 +149,8 @@ void postOpenDeployTasks(final HRegion r)
*/
boolean registerService(Service service);
+ /**
+ * @return heap memory manager instance
+ */
+ HeapMemoryManager getHeapMemoryManager();
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java
index e7111e2495b2..e6e98f28c4ac 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java
@@ -41,6 +41,7 @@
import org.apache.hadoop.hbase.regionserver.CompactionRequestor;
import org.apache.hadoop.hbase.regionserver.FlushRequester;
import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HeapMemoryManager;
import org.apache.hadoop.hbase.regionserver.Leases;
import org.apache.hadoop.hbase.regionserver.RegionServerAccounting;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
@@ -280,4 +281,9 @@ public boolean registerService(Service service) {
// TODO Auto-generated method stub
return false;
}
+
+ @Override
+ public HeapMemoryManager getHeapMemoryManager() {
+ return null;
+ }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
index 82d224b022b9..0fc33db2467f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
@@ -94,6 +94,7 @@
import org.apache.hadoop.hbase.regionserver.CompactionRequestor;
import org.apache.hadoop.hbase.regionserver.FlushRequester;
import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HeapMemoryManager;
import org.apache.hadoop.hbase.regionserver.Leases;
import org.apache.hadoop.hbase.regionserver.RegionServerAccounting;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
@@ -614,4 +615,9 @@ public UpdateConfigurationResponse updateConfiguration(
throws ServiceException {
return null;
}
+
+ @Override
+ public HeapMemoryManager getHeapMemoryManager() {
+ return null;
+ }
}
\ No newline at end of file
|
c5c87c4a4809078b14a2ce49c8da6010dbad17b7
|
elasticsearch
|
[TEST] Don't delete data dirs after test - only- delete their content.--Closes -5815-
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java b/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java
index 103ea959ec4ab..5da1c0759be0b 100644
--- a/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java
+++ b/src/main/java/org/elasticsearch/common/io/FileSystemUtils.java
@@ -74,20 +74,23 @@ public static boolean exists(File... files) {
return false;
}
- public static boolean deleteRecursively(File[] roots) {
+ /**
+ * Deletes the given files recursively. if <tt>deleteRoots</tt> is set to <code>true</code>
+ * the given root files will be deleted as well. Otherwise only their content is deleted.
+ */
+ public static boolean deleteRecursively(File[] roots, boolean deleteRoots) {
boolean deleted = true;
for (File root : roots) {
- deleted &= deleteRecursively(root);
+ deleted &= deleteRecursively(root, deleteRoots);
}
return deleted;
}
- public static boolean deleteRecursively(File root) {
- return deleteRecursively(root, true);
- }
-
- private static boolean innerDeleteRecursively(File root) {
- return deleteRecursively(root, true);
+ /**
+ * Deletes the given files recursively including the given roots.
+ */
+ public static boolean deleteRecursively(File... roots) {
+ return deleteRecursively(roots, true);
}
/**
@@ -105,7 +108,7 @@ public static boolean deleteRecursively(File root, boolean deleteRoot) {
File[] children = root.listFiles();
if (children != null) {
for (File aChildren : children) {
- innerDeleteRecursively(aChildren);
+ deleteRecursively(aChildren, true);
}
}
}
diff --git a/src/test/java/org/elasticsearch/test/TestCluster.java b/src/test/java/org/elasticsearch/test/TestCluster.java
index 5ca85ad10b06a..2e905192927ca 100644
--- a/src/test/java/org/elasticsearch/test/TestCluster.java
+++ b/src/test/java/org/elasticsearch/test/TestCluster.java
@@ -762,10 +762,11 @@ private void resetClients() {
private void wipeDataDirectories() {
if (!dataDirToClean.isEmpty()) {
- logger.info("Wipe data directory for all nodes locations: {}", this.dataDirToClean);
+ boolean deleted = false;
try {
- FileSystemUtils.deleteRecursively(dataDirToClean.toArray(new File[dataDirToClean.size()]));
+ deleted = FileSystemUtils.deleteRecursively(dataDirToClean.toArray(new File[dataDirToClean.size()]), false);
} finally {
+ logger.info("Wipe data directory for all nodes locations: {} success: {}", this.dataDirToClean, deleted);
this.dataDirToClean.clear();
}
}
|
2299b927c8dbfaad4761ed07c3c709e8e5d1c3b8
|
orientdb
|
fixed bug on shutdown--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/src/main/java/com/orientechnologies/lucene/manager/OLuceneIndexManagerAbstract.java b/src/main/java/com/orientechnologies/lucene/manager/OLuceneIndexManagerAbstract.java
index b36472b5846..f55130922a8 100644
--- a/src/main/java/com/orientechnologies/lucene/manager/OLuceneIndexManagerAbstract.java
+++ b/src/main/java/com/orientechnologies/lucene/manager/OLuceneIndexManagerAbstract.java
@@ -121,8 +121,8 @@ protected void initIndex(String indexName, OIndexDefinition indexDefinition, Str
private void reOpen(ODocument metadata) throws IOException {
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
- Directory dir = NIOFSDirectory.open(new File(storageLocalAbstract.getStoragePath() + File.separator + OLUCENE_BASE_DIR
- + File.separator + indexName));
+ String pathname = getIndexPath(storageLocalAbstract);
+ Directory dir = NIOFSDirectory.open(new File(pathname));
indexWriter = createIndexWriter(dir, metadata);
mgrWriter = new TrackingIndexWriter(indexWriter);
manager = new SearcherManager(indexWriter, true, null);
@@ -133,6 +133,10 @@ private void reOpen(ODocument metadata) throws IOException {
nrt.start();
}
+ private String getIndexPath(OStorageLocalAbstract storageLocalAbstract) {
+ return storageLocalAbstract.getStoragePath() + File.separator + OLUCENE_BASE_DIR + File.separator + indexName;
+ }
+
protected IndexSearcher getSearcher() throws IOException {
try {
nrt.waitForGeneration(reopenToken);
@@ -184,12 +188,15 @@ public void delete() {
try {
if (indexWriter != null) {
indexWriter.deleteAll();
+
+ nrt.interrupt();
+ nrt.close();
+
indexWriter.close();
- indexWriter.getDirectory().close();
}
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
- File f = new File(storageLocalAbstract.getStoragePath() + File.separator + indexName);
+ File f = new File(getIndexPath(storageLocalAbstract));
OLuceneIndexUtils.deleteFolder(f);
@@ -259,7 +266,9 @@ public void flush() {
@Override
public void close() {
try {
+ nrt.interrupt();
nrt.close();
+ indexWriter.commit();
indexWriter.close();
} catch (IOException e) {
e.printStackTrace();
@@ -329,7 +338,15 @@ public Analyzer getAnalyzer(ODocument metadata) {
} catch (ClassNotFoundException e) {
throw new OIndexException("Analyzer: " + analyzerString + " not found", e);
} catch (NoSuchMethodException e) {
- e.printStackTrace();
+ Class classAnalyzer = null;
+ try {
+ classAnalyzer = Class.forName(analyzerString);
+ analyzer = (Analyzer) classAnalyzer.newInstance();
+
+ } catch (Throwable e1) {
+ throw new OIndexException("Couldn't instantiate analyzer: public constructor not found", e1);
+ }
+
} catch (InvocationTargetException e) {
e.printStackTrace();
} catch (InstantiationException e) {
diff --git a/src/test/java/com/orientechnologies/test/lucene-local-test.xml b/src/test/java/com/orientechnologies/test/lucene-local-test.xml
new file mode 100755
index 00000000000..4a451ed031a
--- /dev/null
+++ b/src/test/java/com/orientechnologies/test/lucene-local-test.xml
@@ -0,0 +1,194 @@
+<!DOCTYPE suite SYSTEM "http://beust.com/testng/testng-1.0.dtd">
+<suite name="Local Test Suite" verbose="2" parallel="false">
+
+ <parameter name="path" value="@PATH@"/>
+ <parameter name="url" value="@URL@"/>
+ <parameter name="testPath" value="@TESTPATH@"/>
+
+ <test name="Setup">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.base.DeleteDirectory"/>
+ </classes>
+ </test>
+
+ <test name="DbCreation">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.DbListenerTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.DbCreationTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.StorageTest"/>
+ </classes>
+ </test>
+ <test name="Schema">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.SchemaTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.AbstractClassTest"/>
+ </classes>
+ </test>
+ <test name="Security">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.SecurityTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.RestrictedTest"/>
+ </classes>
+ </test>
+ <test name="Hook">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.HookTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.HookTxTest"/>
+ </classes>
+ </test>
+ <test name="Population">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.ComplexTypesTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.CRUDFlatPhysicalTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.CRUDObjectInheritanceTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.CRUDDocumentPhysicalTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.CRUDObjectPhysicalTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.CRUDFlatPhysicalTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.CRUDDocumentValidationTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.RecordMetadataTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.ObjectTreeTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.ObjectDetachingTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.ObjectEnhancingTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.DocumentTrackingTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.EmbeddedObjectSerializationTest"/>
+ </classes>
+ </test>
+ <test name="PopulationObjectSchemaFull">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.CRUDObjectInheritanceTestSchemaFull"/>
+ <class name="com.orientechnologies.orient.test.database.auto.CRUDObjectPhysicalTestSchemaFull"/>
+ <class name="com.orientechnologies.orient.test.database.auto.ObjectTreeTestSchemaFull"/>
+ <class name="com.orientechnologies.orient.test.database.auto.ObjectDetachingTestSchemaFull"/>
+ <class name="com.orientechnologies.orient.test.database.auto.ObjectEnhancingTestSchemaFull"/>
+ </classes>
+ </test>
+ <test name="Tx">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.TransactionAtomicTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.TransactionOptimisticTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.TransactionConsistencyTest"/>
+ </classes>
+ </test>
+ <test name="Index">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.DateIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLEscapingTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLSelectHashIndexReuseTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.IndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.IndexCustomKeyTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.IndexClusterTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.ByteArrayKeyTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.FullTextIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.ClassIndexManagerTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLSelectIndexReuseTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLCreateIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLDropIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLDropClassIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLDropPropertyIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SchemaIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.ClassIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.PropertyIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.CollectionIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLSelectCompositeIndexDirectSearchTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.IndexTxAwareOneValueGetValuesTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.IndexTxAwareMultiValueGetValuesTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.IndexTxAwareMultiValueGetTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.IndexTxAwareOneValueGetTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.IndexTxAwareMultiValueGetEntriesTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.IndexTxAwareOneValueGetEntriesTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.MapIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLSelectByLinkedPropertyIndexReuseTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.LinkListIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.LinkMapIndexTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLIndexWithoutSchemaTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.IndexTxTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.OrderByIndexReuseTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.LinkSetIndexTest"/>
+ </classes>
+ </test>
+ <test name="Dictionary">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.DictionaryTest"/>
+ </classes>
+ </test>
+ <test name="Query">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.WrongQueryTest"/>
+ </classes>
+ </test>
+ <test name="Parsing">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.JSONTest"/>
+ </classes>
+ </test>
+ <test name="Graph">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.GraphDatabaseTest"/>
+ <!-- <class name="com.orientechnologies.orient.test.database.auto.SQLCreateVertexAndEdgeTest"/> -->
+ </classes>
+ </test>
+ <test name="GEO">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.GEOTest"/>
+ </classes>
+ </test>
+ <test name="Index Manager">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.IndexManagerTest"/>
+ </classes>
+ </test>
+ <test name="Binary">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.BinaryTest"/>
+ </classes>
+ </test>
+ <test name="sql-commands">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLCommandsTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLInsertTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLSelectTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLMetadataTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLSelectProjectionsTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLSelectGroupByTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLFunctionsTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLUpdateTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLDeleteTest"/>
+ </classes>
+ </test>
+ <test name="other-commands">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.TraverseTest"/>
+ </classes>
+ </test>
+ <test name="misc">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.TruncateTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.DateTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLFindReferencesTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.SQLCreateLinkTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.MultipleDBTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.ConcurrentUpdatesTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.ConcurrentQueriesTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.DatabaseThreadFactoryTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.CollateTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.PoolTest"/>
+ </classes>
+ </test>
+ <test name="DbTools">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.DbCheckTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.DbImportExportTest"/>
+ <class name="com.orientechnologies.orient.test.database.auto.DbCompareTest"/>
+ </classes>
+ </test>
+ <test name="DbToolsDelete">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.DbDeleteTest"/>
+ </classes>
+ </test>
+ <test name="End">
+ <classes>
+ <class name="com.orientechnologies.orient.test.database.auto.DbClosedTest"/>
+ </classes>
+ </test>
+</suite>
\ No newline at end of file
|
74833e575b909e9bff3d512be4c748c81f7fab4a
|
intellij-community
|
IDEADEV-1678, IDEADEV-1685 Added icons count on- folders in thumbnails view, Added EditExternaly to editors actions--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/plugins/images/META-INF/plugin.xml b/plugins/images/META-INF/plugin.xml
index 470cefe538629..28ebe55497823 100644
--- a/plugins/images/META-INF/plugin.xml
+++ b/plugins/images/META-INF/plugin.xml
@@ -37,7 +37,8 @@
<actions>
<action class="org.intellij.images.actions.EditExternalyAction" text="Jump to External Editor"
- id="Images.EditExternaly" description="Open image in external editor">
+ id="Images.EditExternaly" description="Open image in external editor"
+ icon="/org/intellij/images/icons/EditExternaly.png">
<add-to-group anchor="after" group-id="ProjectViewPopupMenu" relative-to-action="EditSource"/>
<add-to-group anchor="after" group-id="CommanderPopupMenu" relative-to-action="EditSource"/>
</action>
@@ -47,17 +48,19 @@
<add-to-group anchor="after" group-id="CommanderPopupMenu" relative-to-action="AddToFavorites"/>
</action>
<group id="Images.FileViewPopupMenu">
- <reference id="Images.EditExternaly" />
- <reference id="Images.ShowThumbnails" />
+ <reference id="Images.EditExternaly"/>
+ <reference id="Images.ShowThumbnails"/>
</group>
<group id="Images.ThumbnailsPopupMenu">
- <reference id="EditSource" />
- <reference id="CopyPaths" />
- <reference id="Images.EditExternaly" />
+ <reference id="EditSource"/>
+ <reference id="CopyPaths"/>
+ <reference id="Images.EditExternaly"/>
</group>
<group id="Images.EditorToolbar">
- <action class="org.intellij.images.editor.actions.ToggleTransparencyChessboardAction" text="Toggle Transparency Chessboard"
- id="Images.Editor.ToggleTransparencyChessboard" description="Toggle transparency chessboard under image"
+ <action class="org.intellij.images.editor.actions.ToggleTransparencyChessboardAction"
+ text="Toggle Transparency Chessboard"
+ id="Images.Editor.ToggleTransparencyChessboard"
+ description="Toggle transparency chessboard under image"
icon="/org/intellij/images/icons/ToggleTransparencyChessboard.png"/>
<action class="org.intellij.images.editor.actions.ToggleGridAction" text="Toggle Grid"
id="Images.Editor.ToggleGrid" description="Toggle grid lines over image"
@@ -73,6 +76,7 @@
id="Images.Editor.ActualSize" description="Resize image to actual size"
icon="/org/intellij/images/icons/ActualSize.png"/>
<separator/>
+ <reference id="Images.EditExternaly"/>
</group>
<group id="Images.ThumbnailsToolbar">
<action class="org.intellij.images.thumbnail.actions.UpFolderAction" text="Level Up"
@@ -81,11 +85,13 @@
<action class="org.intellij.images.thumbnail.actions.ToggleRecursiveAction" text="Recursive"
id="Images.Thumbnails.ToggleRecursive" description="Toggle recursive browsing"
icon="/objectBrowser/flattenPackages.png"/>
- <separator />
- <action class="org.intellij.images.thumbnail.actions.ToggleTransparencyChessboardAction" text="Toggle Transparency Chessboard"
- id="Images.Thumbnails.ToggleTransparencyChessboard" description="Toggle transparency chessboard under image"
+ <separator/>
+ <action class="org.intellij.images.thumbnail.actions.ToggleTransparencyChessboardAction"
+ text="Toggle Transparency Chessboard"
+ id="Images.Thumbnails.ToggleTransparencyChessboard"
+ description="Toggle transparency chessboard under image"
icon="/org/intellij/images/icons/ToggleTransparencyChessboard.png"/>
- <separator />
+ <separator/>
<action class="org.intellij.images.thumbnail.actions.HideThumbnailsAction" text="Close"
id="Images.Thumbnails.Hide" description="Hide tool window"
icon="/actions/cancel.png"/>
diff --git a/plugins/images/src/org/intellij/images/actions/EditExternalyAction.java b/plugins/images/src/org/intellij/images/actions/EditExternalyAction.java
index 76dd27e3ea5bf..a9e6cb401ccf8 100644
--- a/plugins/images/src/org/intellij/images/actions/EditExternalyAction.java
+++ b/plugins/images/src/org/intellij/images/actions/EditExternalyAction.java
@@ -6,11 +6,11 @@
import com.intellij.openapi.actionSystem.DataConstants;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.project.Project;
-import com.intellij.openapi.ui.Messages;
-import com.intellij.openapi.util.text.StringUtil;
-import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.LocalFileSystem;
+import com.intellij.openapi.vfs.VfsUtil;
+import com.intellij.openapi.util.text.StringUtil;
+import com.intellij.openapi.ui.Messages;
import org.intellij.images.fileTypes.ImageFileTypeManager;
import org.intellij.images.options.Options;
import org.intellij.images.options.OptionsManager;
diff --git a/plugins/images/src/org/intellij/images/editor/ImageEditor.java b/plugins/images/src/org/intellij/images/editor/ImageEditor.java
index e55d64005d554..2a00e6b46e2ed 100644
--- a/plugins/images/src/org/intellij/images/editor/ImageEditor.java
+++ b/plugins/images/src/org/intellij/images/editor/ImageEditor.java
@@ -4,6 +4,7 @@
import com.intellij.openapi.Disposable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFileListener;
+ import com.intellij.openapi.vfs.VirtualFile;
import javax.swing.*;
@@ -13,6 +14,11 @@
* @author <a href="mailto:[email protected]">Alexey Efimov</a>
*/
public interface ImageEditor extends Disposable, VirtualFileListener {
+ /**
+ * Get file
+ */
+ VirtualFile getFile();
+
/**
* Get editor project.
*/
@@ -34,6 +40,11 @@ public interface ImageEditor extends Disposable, VirtualFileListener {
*/
JComponent getContentComponent();
+ /**
+ * Return <code>true</code> if editor show valid image.
+ */
+ boolean isValid();
+
/**
* Return <code>true</code> if editor is already disposed.
*/
diff --git a/plugins/images/src/org/intellij/images/editor/ImageFileEditor.java b/plugins/images/src/org/intellij/images/editor/ImageFileEditor.java
index abf7e7a01a318..747b6e26945fb 100644
--- a/plugins/images/src/org/intellij/images/editor/ImageFileEditor.java
+++ b/plugins/images/src/org/intellij/images/editor/ImageFileEditor.java
@@ -2,9 +2,6 @@
import com.intellij.openapi.fileEditor.FileEditor;
-/**
- * @author <a href="mailto:[email protected]">Alexey Efimov</a>
- */
public interface ImageFileEditor extends FileEditor {
ImageEditor getImageEditor();
}
diff --git a/plugins/images/src/org/intellij/images/editor/impl/ImageEditorImpl.java b/plugins/images/src/org/intellij/images/editor/impl/ImageEditorImpl.java
index 7e1835b96cf6c..634a7416026fe 100644
--- a/plugins/images/src/org/intellij/images/editor/impl/ImageEditorImpl.java
+++ b/plugins/images/src/org/intellij/images/editor/impl/ImageEditorImpl.java
@@ -1,6 +1,5 @@
package org.intellij.images.editor.impl;
-import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileAdapter;
@@ -13,9 +12,10 @@
import org.intellij.images.vfs.IfsUtil;
import javax.swing.*;
+import java.awt.*;
+import java.awt.image.BufferedImage;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
-import java.io.IOException;
/**
* Image viewer implementation.
@@ -23,21 +23,58 @@
* @author <a href="mailto:[email protected]">Alexey Efimov</a>
*/
final class ImageEditorImpl extends VirtualFileAdapter implements ImageEditor {
- private static final Logger LOGGER = Logger.getInstance("ImageEditor");
private final PropertyChangeListener optionsChangeListener = new OptionsChangeListener();
private final Project project;
- private final ImageEditorUI editorUI;
private final VirtualFile file;
+ private final ImageEditorUI editorUI;
private boolean disposed = false;
- ImageEditorImpl(Project project, VirtualFile file) throws IOException {
+ ImageEditorImpl(Project project, VirtualFile file) {
this.project = project;
this.file = file;
// Options
Options options = OptionsManager.getInstance().getOptions();
- editorUI = new ImageEditorUI(IfsUtil.getImage(file), options.getEditorOptions());
+ editorUI = new ImageEditorUI(options.getEditorOptions());
options.addPropertyChangeListener(optionsChangeListener);
+
+ setValue(file);
+ }
+
+ private void setValue(VirtualFile file) {
+ ImageDocument document = editorUI.getImageComponent().getDocument();
+ try {
+ BufferedImage previousImage = document.getValue();
+ BufferedImage image = IfsUtil.getImage(file);
+ document.setValue(image);
+ if (image != null && previousImage == null) {
+ // Set smart zooming behaviour on open
+ Options options = OptionsManager.getInstance().getOptions();
+ ZoomOptions zoomOptions = options.getEditorOptions().getZoomOptions();
+ // Open as actual size
+ ImageZoomModel zoomModel = getZoomModel();
+ zoomModel.setZoomFactor(1.0d);
+
+ if (zoomOptions.isSmartZooming()) {
+ Dimension prefferedSize = zoomOptions.getPrefferedSize();
+ if (prefferedSize.width > image.getWidth() && prefferedSize.height > image.getHeight()) {
+ // Resize to preffered size
+ // Calculate zoom factor
+
+ double factor = (prefferedSize.getWidth() / (double)image.getWidth() + prefferedSize.getHeight() / (double)image.getHeight()) / 2.0d;
+ zoomModel.setZoomFactor(Math.ceil(factor));
+ }
+ }
+ }
+ } catch (Exception e) {
+ // Error loading image file
+ document.setValue(null);
+ }
+ }
+
+ public boolean isValid() {
+ ImageDocument document = editorUI.getImageComponent().getDocument();
+ return document.getValue() != null;
}
public JComponent getComponent() {
@@ -48,6 +85,10 @@ public JComponent getContentComponent() {
return editorUI.getImageComponent();
}
+ public VirtualFile getFile() {
+ return file;
+ }
+
public Project getProject() {
return project;
}
@@ -95,11 +136,7 @@ public void contentsChanged(VirtualFileEvent virtualFileEvent) {
// Change document
file.refresh(true, false, new Runnable() {
public void run() {
- try {
- editorUI.getImageComponent().getDocument().setValue(IfsUtil.getImage(file));
- } catch (IOException e) {
- LOGGER.error(e);
- }
+ setValue(file);
}
});
diff --git a/plugins/images/src/org/intellij/images/editor/impl/ImageEditorManagerImpl.java b/plugins/images/src/org/intellij/images/editor/impl/ImageEditorManagerImpl.java
index 3121f55e264f0..f795bc11185ed 100644
--- a/plugins/images/src/org/intellij/images/editor/impl/ImageEditorManagerImpl.java
+++ b/plugins/images/src/org/intellij/images/editor/impl/ImageEditorManagerImpl.java
@@ -7,8 +7,6 @@
import org.intellij.images.editor.ImageEditorManager;
import org.jetbrains.annotations.NotNull;
-import java.io.IOException;
-
/**
* Image viewer manager implementation.
*
@@ -28,7 +26,7 @@ public void disposeComponent() {
}
@NotNull
- public ImageEditor createImageEditor(@NotNull Project project, @NotNull VirtualFile file) throws IOException {
+ public ImageEditor createImageEditor(@NotNull Project project, @NotNull VirtualFile file) {
return new ImageEditorImpl(project, file);
}
diff --git a/plugins/images/src/org/intellij/images/editor/impl/ImageEditorUI.java b/plugins/images/src/org/intellij/images/editor/impl/ImageEditorUI.java
index 3e044ef56ed06..30a6ee5605ecb 100644
--- a/plugins/images/src/org/intellij/images/editor/impl/ImageEditorUI.java
+++ b/plugins/images/src/org/intellij/images/editor/impl/ImageEditorUI.java
@@ -3,6 +3,7 @@
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.ActionToolbar;
+import com.intellij.openapi.ui.Messages;
import org.intellij.images.editor.ImageDocument;
import org.intellij.images.editor.ImageZoomModel;
import org.intellij.images.editor.actionSystem.ImageEditorActions;
@@ -25,12 +26,16 @@
* @author <a href="mailto:[email protected]">Alexey Efimov</a>
*/
final class ImageEditorUI extends JPanel {
+ public static final String IMAGE_PANEL = "image";
+ public static final String ERROR_PANEL = "error";
+
private final ImageZoomModel zoomModel = new ImageZoomModelImpl();
private final ImageWheelAdapter wheelAdapter = new ImageWheelAdapter();
private final ChangeListener changeListener = new DocumentChangeListener();
private final ImageComponent imageComponent = new ImageComponent();
+ private final JPanel contentPanel;
- ImageEditorUI(BufferedImage image, EditorOptions editorOptions) {
+ ImageEditorUI(EditorOptions editorOptions) {
ImageDocument document = imageComponent.getDocument();
document.addChangeListener(changeListener);
@@ -66,12 +71,25 @@ final class ImageEditorUI extends JPanel {
component.addMouseListener(focusRequester);
scrollPane.addMouseListener(focusRequester);
- add(component, BorderLayout.NORTH);
- add(scrollPane, BorderLayout.CENTER);
+ JLabel errorLabel = new JLabel(
+ "<html><b>Image not loaded</b><br>Try to open it externaly to fix format problem</html>",
+ Messages.getErrorIcon(), JLabel.CENTER
+ );
+
+ JPanel errorPanel = new JPanel(new BorderLayout());
+ errorPanel.add(errorLabel, BorderLayout.CENTER);
- // Set content
- document.setValue(image);
+ contentPanel = new JPanel(new CardLayout());
+ contentPanel.add(scrollPane, IMAGE_PANEL);
+ contentPanel.add(errorPanel, ERROR_PANEL);
+
+ add(component, BorderLayout.NORTH);
+ add(contentPanel, BorderLayout.CENTER);
+ }
+
+ JComponent getContentComponent() {
+ return contentPanel;
}
ImageComponent getImageComponent() {
@@ -97,23 +115,23 @@ public ImageContainerPane(ImageComponent imageComponent) {
add(imageComponent);
}
- private void centerImage() {
- Point imageLocation = imageComponent.getLocation();
+ private void centerComponents() {
Rectangle bounds = getBounds();
- imageLocation.x = (bounds.width - imageComponent.getWidth()) / 2;
- imageLocation.y = (bounds.height - imageComponent.getHeight()) / 2;
- imageComponent.setLocation(imageLocation);
+ Point point = imageComponent.getLocation();
+ point.x = (bounds.width - imageComponent.getWidth()) / 2;
+ point.y = (bounds.height - imageComponent.getHeight()) / 2;
+ imageComponent.setLocation(point);
}
public void invalidate() {
- centerImage();
-
+ centerComponents();
super.invalidate();
}
public Dimension getPreferredSize() {
return imageComponent.getSize();
}
+
}
private final class ImageWheelAdapter implements MouseWheelListener {
@@ -136,15 +154,17 @@ private class ImageZoomModelImpl implements ImageZoomModel {
public double getZoomFactor() {
Dimension size = imageComponent.getCanvasSize();
BufferedImage image = imageComponent.getDocument().getValue();
- return size.getWidth() / (double)image.getWidth();
+ return image != null ? size.getWidth() / (double)image.getWidth() : 0.0d;
}
public void setZoomFactor(double zoomFactor) {
// Change current size
Dimension size = imageComponent.getCanvasSize();
BufferedImage image = imageComponent.getDocument().getValue();
- size.setSize((double)image.getWidth() * zoomFactor, (double)image.getHeight() * zoomFactor);
- imageComponent.setCanvasSize(size);
+ if (image != null) {
+ size.setSize((double)image.getWidth() * zoomFactor, (double)image.getHeight() * zoomFactor);
+ imageComponent.setCanvasSize(size);
+ }
revalidate();
repaint();
@@ -152,7 +172,7 @@ public void setZoomFactor(double zoomFactor) {
private double getMinimumZoomFactor() {
BufferedImage image = imageComponent.getDocument().getValue();
- return 1.0d / image.getWidth();
+ return image != null ? 1.0d / image.getWidth() : 0.0d;
}
public void zoomOut() {
@@ -202,6 +222,12 @@ public boolean canZoomIn() {
private class DocumentChangeListener implements ChangeListener {
public void stateChanged(ChangeEvent e) {
+ ImageDocument document = imageComponent.getDocument();
+ BufferedImage value = document.getValue();
+
+ CardLayout layout = (CardLayout)contentPanel.getLayout();
+ layout.show(contentPanel, value != null ? IMAGE_PANEL : ERROR_PANEL);
+
revalidate();
repaint();
}
diff --git a/plugins/images/src/org/intellij/images/editor/impl/ImageFileEditorImpl.java b/plugins/images/src/org/intellij/images/editor/impl/ImageFileEditorImpl.java
index 1c0c023b2db2c..19f9df96aeb18 100644
--- a/plugins/images/src/org/intellij/images/editor/impl/ImageFileEditorImpl.java
+++ b/plugins/images/src/org/intellij/images/editor/impl/ImageFileEditorImpl.java
@@ -18,10 +18,7 @@
import org.intellij.images.options.*;
import javax.swing.*;
-import java.awt.*;
-import java.awt.image.BufferedImage;
import java.beans.PropertyChangeListener;
-import java.io.IOException;
import java.io.Serializable;
/**
@@ -33,12 +30,10 @@ final class ImageFileEditorImpl extends UserDataHolderBase implements ImageFileE
private static final String NAME = "ImageFileEditor";
private final ImageEditor imageEditor;
- ImageFileEditorImpl(Project project, VirtualFile file) throws IOException {
+ ImageFileEditorImpl(Project project, VirtualFile file) {
ImageEditorManager imageEditorManager = getImageEditorManager();
imageEditor = imageEditorManager.createImageEditor(project, file);
- BufferedImage image = imageEditor.getDocument().getValue();
-
// Append file listener
VirtualFileManager.getInstance().addVirtualFileListener(imageEditor);
@@ -50,22 +45,6 @@ final class ImageFileEditorImpl extends UserDataHolderBase implements ImageFileE
TransparencyChessboardOptions transparencyChessboardOptions = editorOptions.getTransparencyChessboardOptions();
imageEditor.setGridVisible(gridOptions.isShowDefault());
imageEditor.setTransparencyChessboardVisible(transparencyChessboardOptions.isShowDefault());
-
- // Set smart zooming behaviour on open
- ZoomOptions zoomOptions = editorOptions.getZoomOptions();
- // Open as actual size
- zoomModel.setZoomFactor(1.0d);
-
- if (zoomOptions.isSmartZooming()) {
- Dimension prefferedSize = zoomOptions.getPrefferedSize();
- if (prefferedSize.width > image.getWidth() && prefferedSize.height > image.getHeight()) {
- // Resize to preffered size
- // Calculate zoom factor
-
- double factor = (prefferedSize.getWidth() / (double)image.getWidth() + prefferedSize.getHeight() / (double)image.getHeight()) / 2.0d;
- zoomModel.setZoomFactor(Math.ceil(factor));
- }
- }
}
private static ImageEditorManager getImageEditorManager() {
diff --git a/plugins/images/src/org/intellij/images/editor/impl/ImageFileEditorProvider.java b/plugins/images/src/org/intellij/images/editor/impl/ImageFileEditorProvider.java
index ddba58a1f799a..7d41b13cded07 100644
--- a/plugins/images/src/org/intellij/images/editor/impl/ImageFileEditorProvider.java
+++ b/plugins/images/src/org/intellij/images/editor/impl/ImageFileEditorProvider.java
@@ -7,8 +7,6 @@
import org.intellij.images.fileTypes.ImageFileTypeManager;
import org.jdom.Element;
-import java.io.IOException;
-
/**
* Image editor provider.
*
@@ -39,11 +37,7 @@ public boolean accept(Project project, VirtualFile file) {
}
public FileEditor createEditor(Project project, VirtualFile file) {
- try {
- return new ImageFileEditorImpl(project, file);
- } catch (IOException e) {
- throw new IllegalStateException(e);
- }
+ return new ImageFileEditorImpl(project, file);
}
public void disposeEditor(FileEditor editor) {
diff --git a/plugins/images/src/org/intellij/images/icons/EditExternaly.png b/plugins/images/src/org/intellij/images/icons/EditExternaly.png
new file mode 100644
index 0000000000000..48196e6c10e61
Binary files /dev/null and b/plugins/images/src/org/intellij/images/icons/EditExternaly.png differ
diff --git a/plugins/images/src/org/intellij/images/thumbnail/impl/ThumbnailViewUI.java b/plugins/images/src/org/intellij/images/thumbnail/impl/ThumbnailViewUI.java
index 4ed9845e7d54b..9d86b6b3a932c 100644
--- a/plugins/images/src/org/intellij/images/thumbnail/impl/ThumbnailViewUI.java
+++ b/plugins/images/src/org/intellij/images/thumbnail/impl/ThumbnailViewUI.java
@@ -25,7 +25,6 @@
import java.awt.image.BufferedImage;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
-import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
@@ -118,7 +117,7 @@ public int compare(VirtualFile o1, VirtualFile o2) {
return 1;
}
- return o1.getPath().compareTo(o2.getPath());
+ return o1.getPath().toLowerCase().compareTo(o2.getPath().toLowerCase());
}
}
);
@@ -141,6 +140,8 @@ public void setTransparencyChessboardVisible(boolean visible) {
private static final class ThumbnailListCellRenderer extends ThumbnailComponent
implements ListCellRenderer {
+ private final ImageFileTypeManager typeManager = ImageFileTypeManager.getInstance();
+
public Component getListCellRendererComponent(
JList list, Object value, int index, boolean isSelected, boolean cellHasFocus
) {
@@ -149,7 +150,19 @@ public Component getListCellRendererComponent(
setFileName(file.getName());
setToolTipText(file.getPath());
setDirectory(file.isDirectory());
- if (!file.isDirectory()) {
+ if (file.isDirectory()) {
+ int imagesCount = 0;
+ VirtualFile[] children = file.getChildren();
+ for (VirtualFile child : children) {
+ if (typeManager.isImage(child)) {
+ imagesCount++;
+ if (imagesCount > 100) {
+ break;
+ }
+ }
+ }
+ setImagesCount(imagesCount);
+ } else {
// File rendering
setFileSize(file.getLength());
try {
@@ -157,8 +170,10 @@ public Component getListCellRendererComponent(
ImageComponent imageComponent = getImageComponent();
imageComponent.getDocument().setValue(image);
setFormat(IfsUtil.getFormat(file));
- } catch (IOException e) {
+ } catch (Exception e) {
// Ignore
+ ImageComponent imageComponent = getImageComponent();
+ imageComponent.getDocument().setValue(null);
}
}
@@ -197,7 +212,7 @@ private Set<VirtualFile> findFiles(VirtualFile file) {
if (file.isDirectory()) {
if (thumbnailView.isRecursive()) {
files.addAll(findFiles(file.getChildren()));
- } else {
+ } else if (isImagesInDirectory(file)) {
files.add(file);
}
} else if (typeManager.isImage(file)) {
@@ -206,6 +221,17 @@ private Set<VirtualFile> findFiles(VirtualFile file) {
return files;
}
+ private boolean isImagesInDirectory(VirtualFile dir) {
+ ImageFileTypeManager typeManager = ImageFileTypeManager.getInstance();
+ VirtualFile[] files = dir.getChildren();
+ for (VirtualFile file : files) {
+ if (typeManager.isImage(file)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
private final class ThumbnailsMouseAdapter extends MouseAdapter {
public void mousePressed(MouseEvent e) {
super.mousePressed(e);
diff --git a/plugins/images/src/org/intellij/images/ui/ThumbnailComponent.java b/plugins/images/src/org/intellij/images/ui/ThumbnailComponent.java
index a186da00224ac..5ce3cd77aa013 100644
--- a/plugins/images/src/org/intellij/images/ui/ThumbnailComponent.java
+++ b/plugins/images/src/org/intellij/images/ui/ThumbnailComponent.java
@@ -26,6 +26,7 @@ public class ThumbnailComponent extends JComponent {
private long fileSize;
private String fileName;
private boolean directory;
+ private int imagesCount;
public ThumbnailComponent() {
updateUI();
@@ -83,6 +84,18 @@ public void setDirectory(boolean directory) {
}
}
+ public int getImagesCount() {
+ return imagesCount;
+ }
+
+ public void setImagesCount(int imagesCount) {
+ int oldValue = this.imagesCount;
+ if (oldValue != imagesCount) {
+ this.imagesCount = imagesCount;
+ firePropertyChange("imagesCount", oldValue, this.imagesCount);
+ }
+ }
+
public String getFileSizeText() {
if (fileSize < 0x400) {
return fileSize + "b";
diff --git a/plugins/images/src/org/intellij/images/ui/ThumbnailComponentUI.java b/plugins/images/src/org/intellij/images/ui/ThumbnailComponentUI.java
index 635c746d833b1..c88212de85742 100644
--- a/plugins/images/src/org/intellij/images/ui/ThumbnailComponentUI.java
+++ b/plugins/images/src/org/intellij/images/ui/ThumbnailComponentUI.java
@@ -1,6 +1,7 @@
/** $Id$ */
package org.intellij.images.ui;
+import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.IconLoader;
import org.intellij.images.editor.ImageDocument;
@@ -15,11 +16,9 @@
* @author <a href="mailto:[email protected]">Alexey Efimov</a>
*/
public class ThumbnailComponentUI extends ComponentUI {
- private static final Icon THUMBNAIL_BLANK = IconLoader.getIcon("/org/intellij/images/icons/ThumbnailBlank.png");
- private static final Icon THUMBNAIL_DIRECTORY = IconLoader.getIcon("/org/intellij/images/icons/ThumbnailDirectory.png");
- private static final Icon THUMBNAIL_ERROR = IconLoader.getIcon("/org/intellij/images/icons/ThumbnailError.png");
- private static final int THUMBNAIL_BLANK_WIDTH = THUMBNAIL_BLANK.getIconWidth();
- private static final int THUMBNAIL_BLANK_HEIGHT = THUMBNAIL_BLANK.getIconHeight();
+ private static final Icon BLANK_ICON = IconLoader.getIcon("/org/intellij/images/icons/ThumbnailBlank.png");
+ private static final Icon DIRECTORY_ICON = IconLoader.getIcon("/org/intellij/images/icons/ThumbnailDirectory.png");
+ private static final Icon ERROR_ICON = Messages.getErrorIcon();
private static final String DOTS = "...";
private static final ThumbnailComponentUI ui = new ThumbnailComponentUI();
@@ -46,10 +45,25 @@ public void paint(Graphics g, JComponent c) {
}
private void paintDirectory(Graphics g, ThumbnailComponent tc) {
- THUMBNAIL_DIRECTORY.paintIcon(tc, g, 5, 5);
+ // Paint directory icon
+ DIRECTORY_ICON.paintIcon(tc, g, 5, 5);
+
+ int imagesCount = tc.getImagesCount();
+ if (imagesCount > 0) {
+ String title = (imagesCount > 100 ? ">100" : "" + imagesCount) + " icons";
+
+ Font font = getSmallFont();
+ FontMetrics fontMetrics = g.getFontMetrics(font);
+ g.setColor(Color.BLACK);
+ g.setFont(font);
+ g.drawString(title, 5 + (DIRECTORY_ICON.getIconWidth() - fontMetrics.stringWidth(title)) / 2, DIRECTORY_ICON.getIconHeight() / 2 + fontMetrics.getAscent());
+ }
}
private void paintImageThumbnail(Graphics g, ThumbnailComponent tc) {
+ // Paint blank
+ BLANK_ICON.paintIcon(tc, g, 5, 5);
+
ImageComponent imageComponent = tc.getImageComponent();
ImageDocument document = imageComponent.getDocument();
BufferedImage image = document.getValue();
@@ -58,6 +72,7 @@ private void paintImageThumbnail(Graphics g, ThumbnailComponent tc) {
} else {
paintError(g, tc);
}
+
paintFileSize(g, tc);
}
@@ -71,10 +86,7 @@ private void paintImage(Graphics g, ThumbnailComponent tc) {
ImageComponent imageComponent = tc.getImageComponent();
BufferedImage image = imageComponent.getDocument().getValue();
- // Paint image blank
- THUMBNAIL_BLANK.paintIcon(tc, g, 5, 5);
-
- int blankHeight = THUMBNAIL_BLANK_HEIGHT;
+ int blankHeight = BLANK_ICON.getIconHeight();
// Paint image info (and reduce height of text from available height)
blankHeight -= paintImageCaps(g, image);
@@ -103,7 +115,11 @@ private int paintFormatText(ThumbnailComponent tc, Graphics g) {
String format = tc.getFormat();
int stringWidth = fontMetrics.stringWidth(format);
- g.drawString(format, THUMBNAIL_BLANK_WIDTH - stringWidth - 3, THUMBNAIL_BLANK_HEIGHT + 2 - fontMetrics.getHeight() + fontMetrics.getAscent());
+ g.drawString(
+ format,
+ BLANK_ICON.getIconWidth() - stringWidth - 3,
+ BLANK_ICON.getIconHeight() + 2 - fontMetrics.getHeight() + fontMetrics.getAscent()
+ );
return fontMetrics.getHeight();
}
@@ -111,7 +127,7 @@ private int paintFormatText(ThumbnailComponent tc, Graphics g) {
private void paintThumbnail(Graphics g, ImageComponent imageComponent, int blankHeight) {
// Zoom image by available size
- int maxWidth = THUMBNAIL_BLANK_WIDTH - 10;
+ int maxWidth = BLANK_ICON.getIconWidth() - 10;
int maxHeight = blankHeight - 10;
BufferedImage image = imageComponent.getDocument().getValue();
@@ -137,8 +153,8 @@ private void paintThumbnail(Graphics g, ImageComponent imageComponent, int blank
imageComponent.setCanvasSize(imageWidth, imageHeight);
Dimension size = imageComponent.getSize();
- int x = 5 + (THUMBNAIL_BLANK_WIDTH - size.width) / 2;
- int y = 5 + (THUMBNAIL_BLANK_HEIGHT - size.height) / 2;
+ int x = 5 + (BLANK_ICON.getIconWidth() - size.width) / 2;
+ int y = 5 + (BLANK_ICON.getIconHeight() - size.height) / 2;
imageComponent.paint(g.create(x, y, size.width, size.height));
@@ -153,18 +169,19 @@ private void paintFileName(Graphics g, ThumbnailComponent tc) {
String fileName = tc.getFileName();
String title = fileName;
- while (fontMetrics.stringWidth(title) > THUMBNAIL_BLANK_WIDTH - 8) {
+ while (fontMetrics.stringWidth(title) > BLANK_ICON.getIconWidth() - 8) {
title = title.substring(0, title.length() - 1);
}
if (fileName.equals(title)) {
- g.drawString(fileName, 8, THUMBNAIL_BLANK_HEIGHT + 8 + fontMetrics.getAscent());
+ // Center
+ g.drawString(fileName, 6 + (BLANK_ICON.getIconWidth() - 2 - fontMetrics.stringWidth(title)) / 2, BLANK_ICON.getIconHeight() + 8 + fontMetrics.getAscent());
} else {
int dotsWidth = fontMetrics.stringWidth(DOTS);
- while (fontMetrics.stringWidth(title) > THUMBNAIL_BLANK_WIDTH - 8 - dotsWidth) {
+ while (fontMetrics.stringWidth(title) > BLANK_ICON.getIconWidth() - 8 - dotsWidth) {
title = title.substring(0, title.length() - 1);
}
- g.drawString(title + DOTS, 6, THUMBNAIL_BLANK_HEIGHT + 8 + fontMetrics.getAscent());
+ g.drawString(title + DOTS, 6, BLANK_ICON.getIconHeight() + 8 + fontMetrics.getAscent());
}
}
@@ -173,13 +190,24 @@ private void paintFileSize(Graphics g, ThumbnailComponent tc) {
FontMetrics fontMetrics = g.getFontMetrics(font);
g.setColor(Color.BLACK);
g.setFont(font);
- g.drawString(tc.getFileSizeText(), 8, THUMBNAIL_BLANK_HEIGHT + 2 - fontMetrics.getHeight() + fontMetrics.getAscent());
+ g.drawString(
+ tc.getFileSizeText(),
+ 8,
+ BLANK_ICON.getIconHeight() + 2 - fontMetrics.getHeight() + fontMetrics.getAscent()
+ );
}
private void paintError(Graphics g, ThumbnailComponent tc) {
Font font = getSmallFont();
FontMetrics fontMetrics = g.getFontMetrics(font);
- THUMBNAIL_ERROR.paintIcon(tc, g, 5, 5);
+
+ ERROR_ICON.paintIcon(
+ tc,
+ g,
+ 5 + (BLANK_ICON.getIconWidth() - ERROR_ICON.getIconWidth()) / 2,
+ 5 + (BLANK_ICON.getIconHeight() - ERROR_ICON.getIconHeight()) / 2
+ );
+
// Error
String error = UIManager.getString("ThumbnailComponent.errorString");
g.setColor(Color.RED);
@@ -199,7 +227,10 @@ private static Font getSmallFont() {
public Dimension getPreferredSize(JComponent c) {
Font labelFont = getLabelFont();
FontMetrics fontMetrics = c.getFontMetrics(labelFont);
- return new Dimension(THUMBNAIL_BLANK_WIDTH + 10, THUMBNAIL_BLANK_HEIGHT + fontMetrics.getHeight() + 15);
+ return new Dimension(
+ BLANK_ICON.getIconWidth() + 10,
+ BLANK_ICON.getIconHeight() + fontMetrics.getHeight() + 15
+ );
}
public static ComponentUI createUI(JComponent c) {
diff --git a/plugins/images/src/org/intellij/images/vfs/IfsUtil.java b/plugins/images/src/org/intellij/images/vfs/IfsUtil.java
index f0266b3ccc608..4553149d350bc 100644
--- a/plugins/images/src/org/intellij/images/vfs/IfsUtil.java
+++ b/plugins/images/src/org/intellij/images/vfs/IfsUtil.java
@@ -2,9 +2,18 @@
package org.intellij.images.vfs;
import com.intellij.openapi.util.Key;
+import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
+import com.intellij.openapi.vfs.LocalFileSystem;
+import com.intellij.openapi.vfs.VfsUtil;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.ui.Messages;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
+import org.intellij.images.options.Options;
+import org.intellij.images.options.OptionsManager;
+import org.intellij.images.options.impl.OptionsConfigurabe;
+import org.intellij.images.fileTypes.ImageFileTypeManager;
import javax.imageio.ImageIO;
import javax.imageio.ImageReadParam;
@@ -13,6 +22,7 @@
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
+import java.io.File;
import java.lang.ref.SoftReference;
import java.util.Iterator;
@@ -47,7 +57,9 @@ private static boolean refresh(@NotNull VirtualFile file) throws IOException {
file.putUserData(FORMAT_KEY, imageReader.getFormatName());
ImageReadParam param = imageReader.getDefaultReadParam();
imageReader.setInput(imageInputStream, true, true);
- file.putUserData(BUFFERED_IMAGE_REF_KEY, new SoftReference<BufferedImage>(imageReader.read(0, param)));
+ int minIndex = imageReader.getMinIndex();
+ BufferedImage image = imageReader.read(minIndex, param);
+ file.putUserData(BUFFERED_IMAGE_REF_KEY, new SoftReference<BufferedImage>(image));
return true;
} finally {
imageReader.dispose();
@@ -74,4 +86,5 @@ private static boolean refresh(@NotNull VirtualFile file) throws IOException {
refresh(file);
return file.getUserData(FORMAT_KEY);
}
+
}
|
96b3801250c561bdcfc7504100309176ec502f57
|
ReactiveX-RxJava
|
Change order of generics for Func from R, T to T, R- to match C- Rx--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/observables/Observable.java b/rxjava-core/src/main/java/rx/observables/Observable.java
index a877081949..9d41cef5d6 100644
--- a/rxjava-core/src/main/java/rx/observables/Observable.java
+++ b/rxjava-core/src/main/java/rx/observables/Observable.java
@@ -371,7 +371,7 @@ public Subscription subscribe(Observer<T> observer) {
* cancelling the subscription (if applicable)
* @return a Observable that, when a Observer subscribes to it, will execute the given function
*/
- public static <T> Observable<T> create(Func1<Subscription, Observer<T>> func) {
+ public static <T> Observable<T> create(Func1<Observer<T>, Subscription> func) {
return OperationToObservableFunction.toObservableFunction(func);
}
@@ -395,7 +395,7 @@ public static <T> Observable<T> create(Func1<Subscription, Observer<T>> func) {
* @return a Observable that, when a Observer subscribes to it, will execute the given function
*/
public static <T> Observable<T> create(final Object callback) {
- return create(new Func1<Subscription, Observer<T>>() {
+ return create(new Func1<Observer<T>, Subscription>() {
@Override
public Subscription call(Observer<T> t1) {
@@ -451,7 +451,7 @@ public static <T> Observable<T> error(Exception exception) {
* @return a Observable that emits only those items in the original Observable that the filter
* evaluates as true
*/
- public static <T> Observable<T> filter(Observable<T> that, Func1<Boolean, T> predicate) {
+ public static <T> Observable<T> filter(Observable<T> that, Func1<T, Boolean> predicate) {
return OperationFilter.filter(that, predicate);
}
@@ -469,7 +469,7 @@ public static <T> Observable<T> filter(Observable<T> that, Func1<Boolean, T> pre
* evaluates as true
*/
public static <T> Observable<T> filter(Observable<T> that, final Object function) {
- return filter(that, new Func1<Boolean, T>() {
+ return filter(that, new Func1<T, Boolean>() {
@Override
public Boolean call(T t1) {
@@ -574,7 +574,7 @@ public static <T> Observable<T> last(final Observable<T> that) {
* @return a Observable that is the result of applying the transformation function to each item
* in the sequence emitted by the source Observable
*/
- public static <T, R> Observable<R> map(Observable<T> sequence, Func1<R, T> func) {
+ public static <T, R> Observable<R> map(Observable<T> sequence, Func1<T, R> func) {
return OperationMap.map(sequence, func);
}
@@ -597,7 +597,7 @@ public static <T, R> Observable<R> map(Observable<T> sequence, Func1<R, T> func)
* in the sequence emitted by the source Observable
*/
public static <T, R> Observable<R> map(Observable<T> sequence, final Object function) {
- return map(sequence, new Func1<R, T>() {
+ return map(sequence, new Func1<T, R>() {
@Override
public R call(T t1) {
@@ -629,7 +629,7 @@ public R call(T t1) {
* function to each item emitted by the source Observable and merging the results of
* the Observables obtained from this transformation
*/
- public static <T, R> Observable<R> mapMany(Observable<T> sequence, Func1<Observable<R>, T> func) {
+ public static <T, R> Observable<R> mapMany(Observable<T> sequence, Func1<T, Observable<R>> func) {
return OperationMap.mapMany(sequence, func);
}
@@ -656,7 +656,7 @@ public static <T, R> Observable<R> mapMany(Observable<T> sequence, Func1<Observa
* Observables obtained from this transformation
*/
public static <T, R> Observable<R> mapMany(Observable<T> sequence, final Object function) {
- return mapMany(sequence, new Func1<R, T>() {
+ return mapMany(sequence, new Func1<T, R>() {
@Override
public R call(T t1) {
@@ -846,7 +846,7 @@ public static Subscription noOpSubscription() {
* encounters an error
* @return the source Observable, with its behavior modified as described
*/
- public static <T> Observable<T> onErrorResumeNext(final Observable<T> that, final Func1<Observable<T>, Exception> resumeFunction) {
+ public static <T> Observable<T> onErrorResumeNext(final Observable<T> that, final Func1<Exception, Observable<T>> resumeFunction) {
return OperationOnErrorResumeNextViaFunction.onErrorResumeNextViaFunction(that, resumeFunction);
}
@@ -876,7 +876,7 @@ public static <T> Observable<T> onErrorResumeNext(final Observable<T> that, fina
* @return the source Observable, with its behavior modified as described
*/
public static <T> Observable<T> onErrorResumeNext(final Observable<T> that, final Object resumeFunction) {
- return onErrorResumeNext(that, new Func1<Observable<T>, Exception>() {
+ return onErrorResumeNext(that, new Func1<Exception, Observable<T>>() {
@Override
public Observable<T> call(Exception e) {
@@ -936,7 +936,7 @@ public static <T> Observable<T> onErrorResumeNext(final Observable<T> that, fina
* otherwise cause it to call <code>onError</code>
* @return the source Observable, with its behavior modified as described
*/
- public static <T> Observable<T> onErrorReturn(final Observable<T> that, Func1<T, Exception> resumeFunction) {
+ public static <T> Observable<T> onErrorReturn(final Observable<T> that, Func1<Exception, T> resumeFunction) {
return OperationOnErrorReturn.onErrorReturn(that, resumeFunction);
}
@@ -1344,7 +1344,7 @@ public static <T> Observable<List<T>> toSortedList(Observable<T> sequence) {
* @param sortFunction
* @return
*/
- public static <T> Observable<List<T>> toSortedList(Observable<T> sequence, Func2<Integer, T, T> sortFunction) {
+ public static <T> Observable<List<T>> toSortedList(Observable<T> sequence, Func2<T, T, Integer> sortFunction) {
return OperationToObservableSortedList.toSortedList(sequence, sortFunction);
}
@@ -1359,7 +1359,7 @@ public static <T> Observable<List<T>> toSortedList(Observable<T> sequence, Func2
* @return
*/
public static <T> Observable<List<T>> toSortedList(Observable<T> sequence, final Object sortFunction) {
- return OperationToObservableSortedList.toSortedList(sequence, new Func2<Integer, T, T>() {
+ return OperationToObservableSortedList.toSortedList(sequence, new Func2<T, T, Integer>() {
@Override
public Integer call(T t1, T t2) {
@@ -1393,7 +1393,7 @@ public Integer call(T t1, T t2) {
* results in a value that will be emitted by the resulting Observable
* @return a Observable that emits the zipped results
*/
- public static <R, T0, T1> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Func2<R, T0, T1> reduceFunction) {
+ public static <R, T0, T1> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Func2<T0, T1, R> reduceFunction) {
return OperationZip.zip(w0, w1, reduceFunction);
}
@@ -1422,7 +1422,7 @@ public static <R, T0, T1> Observable<R> zip(Observable<T0> w0, Observable<T1> w1
* @return a Observable that emits the zipped results
*/
public static <R, T0, T1> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, final Object function) {
- return zip(w0, w1, new Func2<R, T0, T1>() {
+ return zip(w0, w1, new Func2<T0, T1, R>() {
@Override
public R call(T0 t0, T1 t1) {
@@ -1460,7 +1460,7 @@ public R call(T0 t0, T1 t1) {
* results in a value that will be emitted by the resulting Observable
* @return a Observable that emits the zipped results
*/
- public static <R, T0, T1, T2> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Func3<R, T0, T1, T2> function) {
+ public static <R, T0, T1, T2> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Func3<T0, T1, T2, R> function) {
return OperationZip.zip(w0, w1, w2, function);
}
@@ -1493,7 +1493,7 @@ public static <R, T0, T1, T2> Observable<R> zip(Observable<T0> w0, Observable<T1
* @return a Observable that emits the zipped results
*/
public static <R, T0, T1, T2> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, final Object function) {
- return zip(w0, w1, w2, new Func3<R, T0, T1, T2>() {
+ return zip(w0, w1, w2, new Func3<T0, T1, T2, R>() {
@Override
public R call(T0 t0, T1 t1, T2 t2) {
@@ -1532,7 +1532,7 @@ public R call(T0 t0, T1 t1, T2 t2) {
* results in a value that will be emitted by the resulting Observable
* @return a Observable that emits the zipped results
*/
- public static <R, T0, T1, T2, T3> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Observable<T3> w3, Func4<R, T0, T1, T2, T3> reduceFunction) {
+ public static <R, T0, T1, T2, T3> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Observable<T3> w3, Func4<T0, T1, T2, T3, R> reduceFunction) {
return OperationZip.zip(w0, w1, w2, w3, reduceFunction);
}
@@ -1566,7 +1566,7 @@ public static <R, T0, T1, T2, T3> Observable<R> zip(Observable<T0> w0, Observabl
* @return a Observable that emits the zipped results
*/
public static <R, T0, T1, T2, T3> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Observable<T3> w3, final Object function) {
- return zip(w0, w1, w2, w3, new Func4<R, T0, T1, T2, T3>() {
+ return zip(w0, w1, w2, w3, new Func4<T0, T1, T2, T3, R>() {
@Override
public R call(T0 t0, T1 t1, T2 t2, T3 t3) {
@@ -1605,7 +1605,7 @@ public Observable<T> filter(Func1<Boolean, T> predicate) {
* evaluates as "true"
*/
public Observable<T> filter(final Object callback) {
- return filter(this, new Func1<Boolean, T>() {
+ return filter(this, new Func1<T, Boolean>() {
public Boolean call(T t1) {
return Functions.execute(callback, t1);
@@ -1655,7 +1655,7 @@ public <R> Observable<R> map(Func1<R, T> func) {
* closure to each item in the sequence emitted by the input Observable.
*/
public <R> Observable<R> map(final Object callback) {
- return map(this, new Func1<R, T>() {
+ return map(this, new Func1<T, R>() {
public R call(T t1) {
return Functions.execute(callback, t1);
@@ -1678,7 +1678,7 @@ public R call(T t1) {
* function to each item in the input sequence and merging the results of the
* Observables obtained from this transformation.
*/
- public <R> Observable<R> mapMany(Func1<Observable<R>, T> func) {
+ public <R> Observable<R> mapMany(Func1<T, Observable<R>> func) {
return mapMany(this, func);
}
@@ -1698,7 +1698,7 @@ public <R> Observable<R> mapMany(Func1<Observable<R>, T> func) {
* Observables obtained from this transformation.
*/
public <R> Observable<R> mapMany(final Object callback) {
- return mapMany(this, new Func1<Observable<R>, T>() {
+ return mapMany(this, new Func1<T, Observable<R>>() {
public Observable<R> call(T t1) {
return Functions.execute(callback, t1);
@@ -1742,7 +1742,7 @@ public Observable<Notification<T>> materialize() {
* @param resumeFunction
* @return the original Observable, with appropriately modified behavior
*/
- public Observable<T> onErrorResumeNext(final Func1<Observable<T>, Exception> resumeFunction) {
+ public Observable<T> onErrorResumeNext(final Func1<Exception, Observable<T>> resumeFunction) {
return onErrorResumeNext(this, resumeFunction);
}
@@ -1771,7 +1771,7 @@ public Observable<T> onErrorResumeNext(final Func1<Observable<T>, Exception> res
* @return the original Observable with appropriately modified behavior
*/
public Observable<T> onErrorResumeNext(final Object resumeFunction) {
- return onErrorResumeNext(this, new Func1<Observable<T>, Exception>() {
+ return onErrorResumeNext(this, new Func1<Exception, Observable<T>>() {
public Observable<T> call(Exception e) {
return Functions.execute(resumeFunction, e);
@@ -1829,7 +1829,7 @@ public Observable<T> onErrorResumeNext(final Observable<T> resumeSequence) {
* @param resumeFunction
* @return the original Observable with appropriately modified behavior
*/
- public Observable<T> onErrorReturn(Func1<T, Exception> resumeFunction) {
+ public Observable<T> onErrorReturn(Func1<Exception, T> resumeFunction) {
return onErrorReturn(this, resumeFunction);
}
@@ -1857,7 +1857,7 @@ public Observable<T> onErrorReturn(Func1<T, Exception> resumeFunction) {
* @return the original Observable with appropriately modified behavior
*/
public Observable<T> onErrorReturn(final Object resumeFunction) {
- return onErrorReturn(this, new Func1<T, Exception>() {
+ return onErrorReturn(this, new Func1<Exception, T>() {
public T call(Exception e) {
return Functions.execute(resumeFunction, e);
@@ -2154,7 +2154,7 @@ public Observable<List<T>> toSortedList() {
* @param sortFunction
* @return
*/
- public Observable<List<T>> toSortedList(Func2<Integer, T, T> sortFunction) {
+ public Observable<List<T>> toSortedList(Func2<T, T, Integer> sortFunction) {
return toSortedList(this, sortFunction);
}
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationCombineLatest.java b/rxjava-core/src/main/java/rx/observables/operations/OperationCombineLatest.java
index e4f4835b9e..1d979407a1 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationCombineLatest.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationCombineLatest.java
@@ -1,12 +1,12 @@
/**
* Copyright 2013 Netflix, Inc.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -40,14 +40,14 @@
public class OperationCombineLatest {
- public static <R, T0, T1> Observable<R> combineLatest(Observable<T0> w0, Observable<T1> w1, Func2<R, T0, T1> combineLatestFunction) {
+ public static <T0, T1, R> Observable<R> combineLatest(Observable<T0> w0, Observable<T1> w1, Func2<T0, T1, R> combineLatestFunction) {
Aggregator<R> a = new Aggregator<R>(Functions.fromFunc(combineLatestFunction));
a.addObserver(new CombineObserver<R, T0>(a, w0));
a.addObserver(new CombineObserver<R, T1>(a, w1));
return a;
}
- public static <R, T0, T1, T2> Observable<R> combineLatest(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Func3<R, T0, T1, T2> combineLatestFunction) {
+ public static <T0, T1, T2, R> Observable<R> combineLatest(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Func3<T0, T1, T2, R> combineLatestFunction) {
Aggregator<R> a = new Aggregator<R>(Functions.fromFunc(combineLatestFunction));
a.addObserver(new CombineObserver<R, T0>(a, w0));
a.addObserver(new CombineObserver<R, T1>(a, w1));
@@ -55,7 +55,7 @@ public static <R, T0, T1, T2> Observable<R> combineLatest(Observable<T0> w0, Obs
return a;
}
- public static <R, T0, T1, T2, T3> Observable<R> combineLatest(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Observable<T3> w3, Func4<R, T0, T1, T2, T3> combineLatestFunction) {
+ public static <T0, T1, T2, T3, R> Observable<R> combineLatest(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Observable<T3> w3, Func4<T0, T1, T2, T3, R> combineLatestFunction) {
Aggregator<R> a = new Aggregator<R>(Functions.fromFunc(combineLatestFunction));
a.addObserver(new CombineObserver<R, T0>(a, w0));
a.addObserver(new CombineObserver<R, T1>(a, w1));
@@ -663,7 +663,7 @@ public void testAggregatorEarlyCompletion() {
/* mock calls don't do generics */
@Test
public void testCombineLatest2Types() {
- Func2<String, String, Integer> combineLatestFunction = getConcatStringIntegerCombineLatestFunction();
+ Func2<String, Integer, String> combineLatestFunction = getConcatStringIntegerCombineLatestFunction();
/* define a Observer to receive aggregated events */
Observer<String> aObserver = mock(Observer.class);
@@ -682,7 +682,7 @@ public void testCombineLatest2Types() {
/* mock calls don't do generics */
@Test
public void testCombineLatest3TypesA() {
- Func3<String, String, Integer, int[]> combineLatestFunction = getConcatStringIntegerIntArrayCombineLatestFunction();
+ Func3<String, Integer, int[], String> combineLatestFunction = getConcatStringIntegerIntArrayCombineLatestFunction();
/* define a Observer to receive aggregated events */
Observer<String> aObserver = mock(Observer.class);
@@ -699,7 +699,7 @@ public void testCombineLatest3TypesA() {
/* mock calls don't do generics */
@Test
public void testCombineLatest3TypesB() {
- Func3<String, String, Integer, int[]> combineLatestFunction = getConcatStringIntegerIntArrayCombineLatestFunction();
+ Func3<String, Integer, int[], String> combineLatestFunction = getConcatStringIntegerIntArrayCombineLatestFunction();
/* define a Observer to receive aggregated events */
Observer<String> aObserver = mock(Observer.class);
@@ -753,8 +753,8 @@ public String call(Object... args) {
return combineLatestFunction;
}
- private Func2<String, String, Integer> getConcatStringIntegerCombineLatestFunction() {
- Func2<String, String, Integer> combineLatestFunction = new Func2<String, String, Integer>() {
+ private Func2<String, Integer, String> getConcatStringIntegerCombineLatestFunction() {
+ Func2<String, Integer, String> combineLatestFunction = new Func2<String, Integer, String>() {
@Override
public String call(String s, Integer i) {
@@ -765,8 +765,8 @@ public String call(String s, Integer i) {
return combineLatestFunction;
}
- private Func3<String, String, Integer, int[]> getConcatStringIntegerIntArrayCombineLatestFunction() {
- Func3<String, String, Integer, int[]> combineLatestFunction = new Func3<String, String, Integer, int[]>() {
+ private Func3<String, Integer, int[], String> getConcatStringIntegerIntArrayCombineLatestFunction() {
+ Func3<String, Integer, int[], String> combineLatestFunction = new Func3<String, Integer, int[], String>() {
@Override
public String call(String s, Integer i, int[] iArray) {
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationFilter.java b/rxjava-core/src/main/java/rx/observables/operations/OperationFilter.java
index a4d228e40b..4917dfa00e 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationFilter.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationFilter.java
@@ -1,12 +1,12 @@
/**
* Copyright 2013 Netflix, Inc.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -28,16 +28,16 @@
public final class OperationFilter<T> {
- public static <T> Observable<T> filter(Observable<T> that, Func1<Boolean, T> predicate) {
+ public static <T> Observable<T> filter(Observable<T> that, Func1<T, Boolean> predicate) {
return new Filter<T>(that, predicate);
}
private static class Filter<T> extends Observable<T> {
private final Observable<T> that;
- private final Func1<Boolean, T> predicate;
+ private final Func1<T, Boolean> predicate;
- public Filter(Observable<T> that, Func1<Boolean, T> predicate) {
+ public Filter(Observable<T> that, Func1<T, Boolean> predicate) {
this.that = that;
this.predicate = predicate;
}
@@ -76,7 +76,7 @@ public static class UnitTest {
@Test
public void testFilter() {
Observable<String> w = Observable.toObservable("one", "two", "three");
- Observable<String> Observable = filter(w, new Func1<Boolean, String>() {
+ Observable<String> Observable = filter(w, new Func1<String, Boolean>() {
@Override
public Boolean call(String t1) {
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationMap.java b/rxjava-core/src/main/java/rx/observables/operations/OperationMap.java
index 1cd451e108..d6afeca9ca 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationMap.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationMap.java
@@ -1,12 +1,12 @@
/**
* Copyright 2013 Netflix, Inc.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -47,7 +47,7 @@ public final class OperationMap {
* the type of the output sequence.
* @return a sequence that is the result of applying the transformation function to each item in the input sequence.
*/
- public static <T, R> Observable<R> map(Observable<T> sequence, Func1<R, T> func) {
+ public static <T, R> Observable<R> map(Observable<T> sequence, Func1<T, R> func) {
return new MapObservable<T, R>(sequence, func);
}
@@ -67,7 +67,7 @@ public static <T, R> Observable<R> map(Observable<T> sequence, Func1<R, T> func)
* the type of the output sequence.
* @return a sequence that is the result of applying the transformation function to each item in the input sequence.
*/
- public static <T, R> Observable<R> mapMany(Observable<T> sequence, Func1<Observable<R>, T> func) {
+ public static <T, R> Observable<R> mapMany(Observable<T> sequence, Func1<T, Observable<R>> func) {
return OperationMerge.merge(map(sequence, func));
}
@@ -80,14 +80,14 @@ public static <T, R> Observable<R> mapMany(Observable<T> sequence, Func1<Observa
* the type of the output sequence.
*/
private static class MapObservable<T, R> extends Observable<R> {
- public MapObservable(Observable<T> sequence, Func1<R, T> func) {
+ public MapObservable(Observable<T> sequence, Func1<T, R> func) {
this.sequence = sequence;
this.func = func;
}
private Observable<T> sequence;
- private Func1<R, T> func;
+ private Func1<T, R> func;
public Subscription subscribe(Observer<R> Observer) {
final AtomicObservableSubscription subscription = new AtomicObservableSubscription();
@@ -106,14 +106,14 @@ public Subscription subscribe(Observer<R> Observer) {
* the type of the inner observer items.
*/
private static class MapObserver<T, R> implements Observer<T> {
- public MapObserver(Observer<R> observer, Func1<R, T> func) {
+ public MapObserver(Observer<R> observer, Func1<T, R> func) {
this.observer = observer;
this.func = func;
}
Observer<R> observer;
- Func1<R, T> func;
+ Func1<T, R> func;
public void onNext(T value) {
try {
@@ -148,7 +148,7 @@ public void testMap() {
@SuppressWarnings("unchecked")
Observable<Map<String, String>> observable = Observable.toObservable(m1, m2);
- Observable<String> m = map(observable, new Func1<String, Map<String, String>>() {
+ Observable<String> m = map(observable, new Func1<Map<String, String>, String>() {
@Override
public String call(Map<String, String> map) {
@@ -171,7 +171,7 @@ public void testMapMany() {
Observable<Integer> ids = Observable.toObservable(1, 2);
/* now simulate the behavior to take those IDs and perform nested async calls based on them */
- Observable<String> m = mapMany(ids, new Func1<Observable<String>, Integer>() {
+ Observable<String> m = mapMany(ids, new Func1<Integer, Observable<String>>() {
@SuppressWarnings("unchecked")
@Override
@@ -189,7 +189,7 @@ public Observable<String> call(Integer id) {
}
/* simulate kicking off the async call and performing a select on it to transform the data */
- return map(subObservable, new Func1<String, Map<String, String>>() {
+ return map(subObservable, new Func1<Map<String, String>, String>() {
@Override
public String call(Map<String, String> map) {
return map.get("firstName");
@@ -223,11 +223,11 @@ public void testMapMany2() {
@SuppressWarnings("unchecked")
Observable<Observable<Map<String, String>>> observable = Observable.toObservable(observable1, observable2);
- Observable<String> m = mapMany(observable, new Func1<Observable<String>, Observable<Map<String, String>>>() {
+ Observable<String> m = mapMany(observable, new Func1<Observable<Map<String, String>>, Observable<String>>() {
@Override
public Observable<String> call(Observable<Map<String, String>> o) {
- return map(o, new Func1<String, Map<String, String>>() {
+ return map(o, new Func1<Map<String, String>, String>() {
@Override
public String call(Map<String, String> map) {
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaFunction.java b/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaFunction.java
index 91b48e26d9..8d3e2b2cdd 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaFunction.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorResumeNextViaFunction.java
@@ -1,12 +1,12 @@
/**
* Copyright 2013 Netflix, Inc.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -33,16 +33,16 @@
public final class OperationOnErrorResumeNextViaFunction<T> {
- public static <T> Observable<T> onErrorResumeNextViaFunction(Observable<T> originalSequence, Func1<Observable<T>, Exception> resumeFunction) {
+ public static <T> Observable<T> onErrorResumeNextViaFunction(Observable<T> originalSequence, Func1<Exception, Observable<T>> resumeFunction) {
return new OnErrorResumeNextViaFunction<T>(originalSequence, resumeFunction);
}
private static class OnErrorResumeNextViaFunction<T> extends Observable<T> {
- private final Func1<Observable<T>, Exception> resumeFunction;
+ private final Func1<Exception, Observable<T>> resumeFunction;
private final Observable<T> originalSequence;
- public OnErrorResumeNextViaFunction(Observable<T> originalSequence, Func1<Observable<T>, Exception> resumeFunction) {
+ public OnErrorResumeNextViaFunction(Observable<T> originalSequence, Func1<Exception, Observable<T>> resumeFunction) {
this.resumeFunction = resumeFunction;
this.originalSequence = originalSequence;
}
@@ -112,7 +112,7 @@ public void testResumeNext() {
final AtomicReference<Exception> receivedException = new AtomicReference<Exception>();
Subscription s = mock(Subscription.class);
TestObservable w = new TestObservable(s, "one");
- Func1<Observable<String>, Exception> resume = new Func1<Observable<String>, Exception>() {
+ Func1<Exception, Observable<String>> resume = new Func1<Exception, Observable<String>>() {
@Override
public Observable<String> call(Exception t1) {
@@ -150,7 +150,7 @@ public Observable<String> call(Exception t1) {
public void testFunctionThrowsError() {
Subscription s = mock(Subscription.class);
TestObservable w = new TestObservable(s, "one");
- Func1<Observable<String>, Exception> resume = new Func1<Observable<String>, Exception>() {
+ Func1<Exception, Observable<String>> resume = new Func1<Exception, Observable<String>>() {
@Override
public Observable<String> call(Exception t1) {
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorReturn.java b/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorReturn.java
index 85f233990c..8a546f96a7 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorReturn.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationOnErrorReturn.java
@@ -1,12 +1,12 @@
/**
* Copyright 2013 Netflix, Inc.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -36,15 +36,15 @@
*/
public final class OperationOnErrorReturn<T> {
- public static <T> Observable<T> onErrorReturn(Observable<T> originalSequence, Func1<T, Exception> resumeFunction) {
+ public static <T> Observable<T> onErrorReturn(Observable<T> originalSequence, Func1<Exception, T> resumeFunction) {
return new OnErrorReturn<T>(originalSequence, resumeFunction);
}
private static class OnErrorReturn<T> extends Observable<T> {
- private final Func1<T, Exception> resumeFunction;
+ private final Func1<Exception, T> resumeFunction;
private final Observable<T> originalSequence;
- public OnErrorReturn(Observable<T> originalSequence, Func1<T, Exception> resumeFunction) {
+ public OnErrorReturn(Observable<T> originalSequence, Func1<Exception, T> resumeFunction) {
this.resumeFunction = resumeFunction;
this.originalSequence = originalSequence;
}
@@ -120,7 +120,7 @@ public void testResumeNext() {
TestObservable w = new TestObservable(s, "one");
final AtomicReference<Exception> capturedException = new AtomicReference<Exception>();
- Observable<String> Observable = onErrorReturn(w, new Func1<String, Exception>() {
+ Observable<String> Observable = onErrorReturn(w, new Func1<Exception, String>() {
@Override
public String call(Exception e) {
@@ -156,7 +156,7 @@ public void testFunctionThrowsError() {
TestObservable w = new TestObservable(s, "one");
final AtomicReference<Exception> capturedException = new AtomicReference<Exception>();
- Observable<String> Observable = onErrorReturn(w, new Func1<String, Exception>() {
+ Observable<String> Observable = onErrorReturn(w, new Func1<Exception, String>() {
@Override
public String call(Exception e) {
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableFunction.java b/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableFunction.java
index 1af18116d5..bd8f4a7cf0 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableFunction.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableFunction.java
@@ -1,12 +1,12 @@
/**
* Copyright 2013 Netflix, Inc.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -37,14 +37,14 @@
*/
public final class OperationToObservableFunction<T> {
- public static <T> Observable<T> toObservableFunction(Func1<Subscription, Observer<T>> func) {
+ public static <T> Observable<T> toObservableFunction(Func1<Observer<T>, Subscription> func) {
return new ToObservableFunction<T>(func);
}
private static class ToObservableFunction<T> extends Observable<T> {
- private final Func1<Subscription, Observer<T>> func;
+ private final Func1<Observer<T>, Subscription> func;
- public ToObservableFunction(Func1<Subscription, Observer<T>> func) {
+ public ToObservableFunction(Func1<Observer<T>, Subscription> func) {
this.func = func;
}
@@ -67,7 +67,7 @@ public static class UnitTest {
@Test
public void testCreate() {
- Observable<String> observable = toObservableFunction(new Func1<Subscription, Observer<String>>() {
+ Observable<String> observable = toObservableFunction(new Func1<Observer<String>, Subscription>() {
@Override
public Subscription call(Observer<String> Observer) {
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableSortedList.java b/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableSortedList.java
index c8af62d356..276e39ce61 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableSortedList.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationToObservableSortedList.java
@@ -1,12 +1,12 @@
/**
* Copyright 2013 Netflix, Inc.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -59,7 +59,7 @@ public static <T> Observable<List<T>> toSortedList(Observable<T> sequence) {
* @param sortFunction
* @return
*/
- public static <T> Observable<List<T>> toSortedList(Observable<T> sequence, Func2<Integer, T, T> sortFunction) {
+ public static <T> Observable<List<T>> toSortedList(Observable<T> sequence, Func2<T, T, Integer> sortFunction) {
return new ToObservableSortedList<T>(sequence, sortFunction);
}
@@ -67,7 +67,7 @@ private static class ToObservableSortedList<T> extends Observable<List<T>> {
private final Observable<T> that;
private final ConcurrentLinkedQueue<T> list = new ConcurrentLinkedQueue<T>();
- private final Func2<Integer, T, T> sortFunction;
+ private final Func2<T, T, Integer> sortFunction;
// unchecked as we're support Object for the default
@SuppressWarnings("unchecked")
@@ -75,7 +75,7 @@ private ToObservableSortedList(Observable<T> that) {
this(that, defaultSortFunction);
}
- private ToObservableSortedList(Observable<T> that, Func2<Integer, T, T> sortFunction) {
+ private ToObservableSortedList(Observable<T> that, Func2<T, T, Integer> sortFunction) {
this.that = that;
this.sortFunction = sortFunction;
}
@@ -127,7 +127,7 @@ public int compare(T o1, T o2) {
@SuppressWarnings("rawtypes")
private static Func2 defaultSortFunction = new DefaultComparableFunction();
- private static class DefaultComparableFunction implements Func2<Integer, Object, Object> {
+ private static class DefaultComparableFunction implements Func2<Object, Object, Integer> {
// unchecked because we want to support Object for this default
@SuppressWarnings("unchecked")
diff --git a/rxjava-core/src/main/java/rx/observables/operations/OperationZip.java b/rxjava-core/src/main/java/rx/observables/operations/OperationZip.java
index e80fa1d7fb..824ce504f6 100644
--- a/rxjava-core/src/main/java/rx/observables/operations/OperationZip.java
+++ b/rxjava-core/src/main/java/rx/observables/operations/OperationZip.java
@@ -1,12 +1,12 @@
/**
* Copyright 2013 Netflix, Inc.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -40,14 +40,14 @@
public final class OperationZip {
- public static <R, T0, T1> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Func2<R, T0, T1> zipFunction) {
+ public static <T0, T1, R> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Func2<T0, T1, R> zipFunction) {
Aggregator<R> a = new Aggregator<R>(Functions.fromFunc(zipFunction));
a.addObserver(new ZipObserver<R, T0>(a, w0));
a.addObserver(new ZipObserver<R, T1>(a, w1));
return a;
}
- public static <R, T0, T1, T2> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Func3<R, T0, T1, T2> zipFunction) {
+ public static <T0, T1, T2, R> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Func3<T0, T1, T2, R> zipFunction) {
Aggregator<R> a = new Aggregator<R>(Functions.fromFunc(zipFunction));
a.addObserver(new ZipObserver<R, T0>(a, w0));
a.addObserver(new ZipObserver<R, T1>(a, w1));
@@ -55,7 +55,7 @@ public static <R, T0, T1, T2> Observable<R> zip(Observable<T0> w0, Observable<T1
return a;
}
- public static <R, T0, T1, T2, T3> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Observable<T3> w3, Func4<R, T0, T1, T2, T3> zipFunction) {
+ public static <T0, T1, T2, T3, R> Observable<R> zip(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Observable<T3> w3, Func4<T0, T1, T2, T3, R> zipFunction) {
Aggregator<R> a = new Aggregator<R>(Functions.fromFunc(zipFunction));
a.addObserver(new ZipObserver<R, T0>(a, w0));
a.addObserver(new ZipObserver<R, T1>(a, w1));
@@ -660,7 +660,7 @@ public void testAggregatorEarlyCompletion() {
/* mock calls don't do generics */
@Test
public void testZip2Types() {
- Func2<String, String, Integer> zipr = getConcatStringIntegerZipr();
+ Func2<String, Integer, String> zipr = getConcatStringIntegerZipr();
/* define a Observer to receive aggregated events */
Observer<String> aObserver = mock(Observer.class);
@@ -679,7 +679,7 @@ public void testZip2Types() {
/* mock calls don't do generics */
@Test
public void testZip3Types() {
- Func3<String, String, Integer, int[]> zipr = getConcatStringIntegerIntArrayZipr();
+ Func3<String, Integer, int[], String> zipr = getConcatStringIntegerIntArrayZipr();
/* define a Observer to receive aggregated events */
Observer<String> aObserver = mock(Observer.class);
@@ -758,8 +758,8 @@ public String call(Object... args) {
return zipr;
}
- private Func2<String, String, Integer> getConcatStringIntegerZipr() {
- Func2<String, String, Integer> zipr = new Func2<String, String, Integer>() {
+ private Func2<String, Integer, String> getConcatStringIntegerZipr() {
+ Func2<String, Integer, String> zipr = new Func2<String, Integer, String>() {
@Override
public String call(String s, Integer i) {
@@ -770,8 +770,8 @@ public String call(String s, Integer i) {
return zipr;
}
- private Func3<String, String, Integer, int[]> getConcatStringIntegerIntArrayZipr() {
- Func3<String, String, Integer, int[]> zipr = new Func3<String, String, Integer, int[]>() {
+ private Func3<String, Integer, int[], String> getConcatStringIntegerIntArrayZipr() {
+ Func3<String, Integer, int[], String> zipr = new Func3<String, Integer, int[], String>() {
@Override
public String call(String s, Integer i, int[] iArray) {
diff --git a/rxjava-core/src/main/java/rx/util/Func1.java b/rxjava-core/src/main/java/rx/util/Func1.java
index 60c9aa99c9..ae67de59c5 100644
--- a/rxjava-core/src/main/java/rx/util/Func1.java
+++ b/rxjava-core/src/main/java/rx/util/Func1.java
@@ -15,6 +15,6 @@
*/
package rx.util;
-public interface Func1<R, T1> {
+public interface Func1<T1, R> {
public R call(T1 t1);
}
\ No newline at end of file
diff --git a/rxjava-core/src/main/java/rx/util/Func2.java b/rxjava-core/src/main/java/rx/util/Func2.java
index 2ab61df6c5..5c9fa7936d 100644
--- a/rxjava-core/src/main/java/rx/util/Func2.java
+++ b/rxjava-core/src/main/java/rx/util/Func2.java
@@ -15,6 +15,6 @@
*/
package rx.util;
-public interface Func2<R, T1, T2> {
+public interface Func2<T1, T2, R> {
public R call(T1 t1, T2 t2);
}
\ No newline at end of file
diff --git a/rxjava-core/src/main/java/rx/util/Func3.java b/rxjava-core/src/main/java/rx/util/Func3.java
index 733c1920c3..8147d35706 100644
--- a/rxjava-core/src/main/java/rx/util/Func3.java
+++ b/rxjava-core/src/main/java/rx/util/Func3.java
@@ -15,6 +15,6 @@
*/
package rx.util;
-public interface Func3<R, T1, T2, T3> {
+public interface Func3<T1, T2, T3, R> {
public R call(T1 t1, T2 t2, T3 t3);
}
\ No newline at end of file
diff --git a/rxjava-core/src/main/java/rx/util/Func4.java b/rxjava-core/src/main/java/rx/util/Func4.java
index e67e84bc93..2a4df7212b 100644
--- a/rxjava-core/src/main/java/rx/util/Func4.java
+++ b/rxjava-core/src/main/java/rx/util/Func4.java
@@ -15,6 +15,6 @@
*/
package rx.util;
-public interface Func4<R, T1, T2, T3, T4> {
+public interface Func4<T1, T2, T3, T4, R> {
public R call(T1 t1, T2 t2, T3 t3, T4 t4);
}
\ No newline at end of file
diff --git a/rxjava-core/src/main/java/rx/util/Func5.java b/rxjava-core/src/main/java/rx/util/Func5.java
index 83d8caca69..888334f4be 100644
--- a/rxjava-core/src/main/java/rx/util/Func5.java
+++ b/rxjava-core/src/main/java/rx/util/Func5.java
@@ -15,6 +15,6 @@
*/
package rx.util;
-public interface Func5<R, T1, T2, T3, T4, T5> {
+public interface Func5<T1, T2, T3, T4, T5, R> {
public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5);
}
\ No newline at end of file
diff --git a/rxjava-core/src/main/java/rx/util/Func6.java b/rxjava-core/src/main/java/rx/util/Func6.java
index ab0cd15bd6..1e036a5aff 100644
--- a/rxjava-core/src/main/java/rx/util/Func6.java
+++ b/rxjava-core/src/main/java/rx/util/Func6.java
@@ -15,6 +15,6 @@
*/
package rx.util;
-public interface Func6<R, T1, T2, T3, T4, T5, T6> {
+public interface Func6<T1, T2, T3, T4, T5, T6, R> {
public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6);
}
\ No newline at end of file
diff --git a/rxjava-core/src/main/java/rx/util/Func7.java b/rxjava-core/src/main/java/rx/util/Func7.java
index 0178f11242..6c6559013a 100644
--- a/rxjava-core/src/main/java/rx/util/Func7.java
+++ b/rxjava-core/src/main/java/rx/util/Func7.java
@@ -15,6 +15,6 @@
*/
package rx.util;
-public interface Func7<R, T1, T2, T3, T4, T5, T6, T7> {
+public interface Func7<T1, T2, T3, T4, T5, T6, T7, R> {
public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7);
}
\ No newline at end of file
diff --git a/rxjava-core/src/main/java/rx/util/Func8.java b/rxjava-core/src/main/java/rx/util/Func8.java
index dac05f14b2..2263823a7b 100644
--- a/rxjava-core/src/main/java/rx/util/Func8.java
+++ b/rxjava-core/src/main/java/rx/util/Func8.java
@@ -15,6 +15,6 @@
*/
package rx.util;
-public interface Func8<R, T1, T2, T3, T4, T5, T6, T7, T8> {
+public interface Func8<T1, T2, T3, T4, T5, T6, T7, T8, R> {
public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8);
}
\ No newline at end of file
diff --git a/rxjava-core/src/main/java/rx/util/Func9.java b/rxjava-core/src/main/java/rx/util/Func9.java
index c19feca538..2de318b122 100644
--- a/rxjava-core/src/main/java/rx/util/Func9.java
+++ b/rxjava-core/src/main/java/rx/util/Func9.java
@@ -15,6 +15,6 @@
*/
package rx.util;
-public interface Func9<R, T1, T2, T3, T4, T5, T6, T7, T8, T9> {
+public interface Func9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R> {
public R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9);
}
\ No newline at end of file
diff --git a/rxjava-core/src/main/java/rx/util/Functions.java b/rxjava-core/src/main/java/rx/util/Functions.java
index 8b52390b86..1f935de4e7 100644
--- a/rxjava-core/src/main/java/rx/util/Functions.java
+++ b/rxjava-core/src/main/java/rx/util/Functions.java
@@ -121,25 +121,25 @@ public static <R> R execute(Object function, Object... args) {
}
return (R) f.call();
} else if (function instanceof Func1) {
- Func1<R, Object> f = (Func1<R, Object>) function;
+ Func1<Object, R> f = (Func1<Object, R>) function;
if (args.length != 1) {
throw new RuntimeException("The closure was Func1 and expected 1 argument, but we received: " + args.length);
}
return f.call(args[0]);
} else if (function instanceof Func2) {
- Func2<R, Object, Object> f = (Func2<R, Object, Object>) function;
+ Func2<Object, Object, R> f = (Func2<Object, Object, R>) function;
if (args.length != 2) {
throw new RuntimeException("The closure was Func2 and expected 2 arguments, but we received: " + args.length);
}
return f.call(args[0], args[1]);
} else if (function instanceof Func3) {
- Func3<R, Object, Object, Object> f = (Func3<R, Object, Object, Object>) function;
+ Func3<Object, Object, Object, R> f = (Func3<Object, Object, Object, R>) function;
if (args.length != 3) {
throw new RuntimeException("The closure was Func3 and expected 3 arguments, but we received: " + args.length);
}
return (R) f.call(args[0], args[1], args[2]);
} else if (function instanceof Func4) {
- Func4<R, Object, Object, Object, Object> f = (Func4<R, Object, Object, Object, Object>) function;
+ Func4<Object, Object, Object, Object, R> f = (Func4<Object, Object, Object, Object, R>) function;
if (args.length != 1) {
throw new RuntimeException("The closure was Func4 and expected 4 arguments, but we received: " + args.length);
}
@@ -163,7 +163,7 @@ public static <R> R execute(Object function, Object... args) {
}
}
- public static <R, T0> FuncN<R> fromFunc(final Func1<R, T0> f) {
+ public static <T0, R> FuncN<R> fromFunc(final Func1<T0, R> f) {
return new FuncN<R>() {
/**
@@ -184,7 +184,7 @@ public R call(Object... args) {
};
}
- public static <R, T0, T1> FuncN<R> fromFunc(final Func2<R, T0, T1> f) {
+ public static <T0, T1, R> FuncN<R> fromFunc(final Func2<T0, T1, R> f) {
return new FuncN<R>() {
/**
@@ -204,7 +204,7 @@ public R call(Object... args) {
};
}
- public static <R, T0, T1, T2> FuncN<R> fromFunc(final Func3<R, T0, T1, T2> f) {
+ public static <T0, T1, T2, R> FuncN<R> fromFunc(final Func3<T0, T1, T2, R> f) {
return new FuncN<R>() {
/**
@@ -224,7 +224,7 @@ public R call(Object... args) {
};
}
- public static <R, T0, T1, T2, T3> FuncN<R> fromFunc(final Func4<R, T0, T1, T2, T3> f) {
+ public static <T0, T1, T2, T3, R> FuncN<R> fromFunc(final Func4<T0, T1, T2, T3, R> f) {
return new FuncN<R>() {
/**
|
ebece5174a5ab834576c0f981682b78f7a6efe25
|
camel
|
CAMEL-1099: Added FileIdempotentRepositry--git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@723291 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/processor/idempotent/FileIdempotentRepository.java b/camel-core/src/main/java/org/apache/camel/processor/idempotent/FileIdempotentRepository.java
index 776c5e3debfd8..d56f6c290bf61 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/idempotent/FileIdempotentRepository.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/idempotent/FileIdempotentRepository.java
@@ -21,6 +21,7 @@
import java.io.IOException;
import java.util.Map;
import java.util.Scanner;
+import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.camel.spi.IdempotentRepository;
import org.apache.camel.util.LRUCache;
@@ -40,34 +41,53 @@ public class FileIdempotentRepository implements IdempotentRepository<String> {
private static final transient Log LOG = LogFactory.getLog(FileIdempotentRepository.class);
private static final String STORE_DELIMITER = "\n";
private Map<String, Object> cache;
- private File store;
- private long maxStoreSize = 1024 * 1000L; // 1mb store file
+ private File fileStore;
+ private long maxFileStoreSize = 1024 * 1000L; // 1mb store file
+ private AtomicBoolean init = new AtomicBoolean();
- public FileIdempotentRepository(final File store, final Map<String, Object> set) {
- this.store = store;
+ public FileIdempotentRepository() {
+ // default use a 1st level cache
+ this.cache = new LRUCache<String, Object>(1000);
+ }
+
+ public FileIdempotentRepository(File fileStore, Map<String, Object> set) {
+ this.fileStore = fileStore;
this.cache = set;
- loadStore();
}
/**
* Creates a new file based repository using a {@link org.apache.camel.util.LRUCache}
* as 1st level cache with a default of 1000 entries in the cache.
*
- * @param store the file store
+ * @param fileStore the file store
*/
- public static IdempotentRepository fileIdempotentRepository(File store) {
- return fileIdempotentRepository(store, 1000);
+ public static IdempotentRepository fileIdempotentRepository(File fileStore) {
+ return fileIdempotentRepository(fileStore, 1000);
}
/**
* Creates a new file based repository using a {@link org.apache.camel.util.LRUCache}
* as 1st level cache.
*
- * @param store the file store
+ * @param fileStore the file store
* @param cacheSize the cache size
*/
- public static IdempotentRepository fileIdempotentRepository(File store, int cacheSize) {
- return fileIdempotentRepository(store, new LRUCache<String, Object>(cacheSize));
+ public static IdempotentRepository fileIdempotentRepository(File fileStore, int cacheSize) {
+ return fileIdempotentRepository(fileStore, new LRUCache<String, Object>(cacheSize));
+ }
+
+ /**
+ * Creates a new file based repository using a {@link org.apache.camel.util.LRUCache}
+ * as 1st level cache.
+ *
+ * @param fileStore the file store
+ * @param cacheSize the cache size
+ * @param maxFileStoreSize the max size in bytes for the filestore file
+ */
+ public static IdempotentRepository fileIdempotentRepository(File fileStore, int cacheSize, long maxFileStoreSize) {
+ FileIdempotentRepository repository = new FileIdempotentRepository(fileStore, new LRUCache<String, Object>(cacheSize));
+ repository.setMaxFileStoreSize(maxFileStoreSize);
+ return repository;
}
/**
@@ -86,11 +106,16 @@ public static IdempotentRepository fileIdempotentRepository(File store, Map<Stri
public boolean add(String messageId) {
synchronized (cache) {
+ // init store if not loaded before
+ if (init.compareAndSet(false, true)) {
+ loadStore();
+ }
+
if (cache.containsKey(messageId)) {
return false;
} else {
cache.put(messageId, messageId);
- if (store.length() < maxStoreSize) {
+ if (fileStore.length() < maxFileStoreSize) {
// just append to store
appendToStore(messageId);
} else {
@@ -105,16 +130,20 @@ public boolean add(String messageId) {
public boolean contains(String key) {
synchronized (cache) {
+ // init store if not loaded before
+ if (init.compareAndSet(false, true)) {
+ loadStore();
+ }
return cache.containsKey(key);
}
}
- public File getStore() {
- return store;
+ public File getFileStore() {
+ return fileStore;
}
- public void setStore(File store) {
- this.store = store;
+ public void setFileStore(File fileStore) {
+ this.fileStore = fileStore;
}
public Map<String, Object> getCache() {
@@ -125,8 +154,8 @@ public void setCache(Map<String, Object> cache) {
this.cache = cache;
}
- public long getMaxStoreSize() {
- return maxStoreSize;
+ public long getMaxFileStoreSize() {
+ return maxFileStoreSize;
}
/**
@@ -134,8 +163,18 @@ public long getMaxStoreSize() {
* <p/>
* The default is 1mb.
*/
- public void setMaxStoreSize(long maxStoreSize) {
- this.maxStoreSize = maxStoreSize;
+ public void setMaxFileStoreSize(long maxFileStoreSize) {
+ this.maxFileStoreSize = maxFileStoreSize;
+ }
+
+ /**
+ * Sets the cache size
+ */
+ public void setCacheSize(int size) {
+ if (cache != null) {
+ cache.clear();
+ }
+ cache = new LRUCache<String, Object>(size);
}
/**
@@ -145,11 +184,16 @@ public void setMaxStoreSize(long maxStoreSize) {
*/
protected void appendToStore(final String messageId) {
if (LOG.isDebugEnabled()) {
- LOG.debug("Appending " + messageId + " to idempotent filestore: " + store);
+ LOG.debug("Appending " + messageId + " to idempotent filestore: " + fileStore);
}
FileOutputStream fos = null;
try {
- fos = new FileOutputStream(store, true);
+ // create store if missing
+ if (!fileStore.exists()) {
+ fileStore.createNewFile();
+ }
+ // append to store
+ fos = new FileOutputStream(fileStore, true);
fos.write(messageId.getBytes());
fos.write(STORE_DELIMITER.getBytes());
} catch (IOException e) {
@@ -165,11 +209,11 @@ protected void appendToStore(final String messageId) {
*/
protected void trunkStore() {
if (LOG.isDebugEnabled()) {
- LOG.debug("Trunking idempotent filestore: " + store);
+ LOG.debug("Trunking idempotent filestore: " + fileStore);
}
FileOutputStream fos = null;
try {
- fos = new FileOutputStream(store);
+ fos = new FileOutputStream(fileStore);
for (String key : cache.keySet()) {
fos.write(key.getBytes());
fos.write(STORE_DELIMITER.getBytes());
@@ -186,17 +230,17 @@ protected void trunkStore() {
*/
protected void loadStore() {
if (LOG.isTraceEnabled()) {
- LOG.trace("Loading to 1st level cache from idempotent filestore: " + store);
+ LOG.trace("Loading to 1st level cache from idempotent filestore: " + fileStore);
}
- if (!store.exists()) {
+ if (!fileStore.exists()) {
return;
}
cache.clear();
Scanner scanner = null;
try {
- scanner = new Scanner(store);
+ scanner = new Scanner(fileStore);
scanner.useDelimiter(STORE_DELIMITER);
while (scanner.hasNextLine()) {
String line = scanner.nextLine();
@@ -211,7 +255,7 @@ protected void loadStore() {
}
if (LOG.isDebugEnabled()) {
- LOG.debug("Loaded " + cache.size() + " to the 1st level cache from idempotent filestore: " + store);
+ LOG.debug("Loaded " + cache.size() + " to the 1st level cache from idempotent filestore: " + fileStore);
}
}
diff --git a/camel-core/src/main/java/org/apache/camel/processor/idempotent/MemoryIdempotentRepository.java b/camel-core/src/main/java/org/apache/camel/processor/idempotent/MemoryIdempotentRepository.java
index ae22aca8bb7f3..700f0edb87f00 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/idempotent/MemoryIdempotentRepository.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/idempotent/MemoryIdempotentRepository.java
@@ -33,6 +33,10 @@ public class MemoryIdempotentRepository implements IdempotentRepository<String>
private Map<String, Object> cache;
+ public MemoryIdempotentRepository() {
+ this.cache = new LRUCache<String, Object>(1000);
+ }
+
public MemoryIdempotentRepository(Map<String, Object> set) {
this.cache = set;
}
@@ -42,7 +46,7 @@ public MemoryIdempotentRepository(Map<String, Object> set) {
* with a default of 1000 entries in the cache.
*/
public static IdempotentRepository memoryIdempotentRepository() {
- return memoryIdempotentRepository(1000);
+ return new MemoryIdempotentRepository();
}
/**
diff --git a/components/camel-spring/src/test/java/org/apache/camel/spring/processor/idempotent/FileConsumerIdempotentTest.java b/components/camel-spring/src/test/java/org/apache/camel/spring/processor/idempotent/FileConsumerIdempotentTest.java
new file mode 100644
index 0000000000000..46c038f1f741b
--- /dev/null
+++ b/components/camel-spring/src/test/java/org/apache/camel/spring/processor/idempotent/FileConsumerIdempotentTest.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.spring.processor.idempotent;
+
+import java.io.File;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.ContextTestSupport;
+import org.apache.camel.component.file.FileComponent;
+import org.apache.camel.component.mock.MockEndpoint;
+import org.apache.camel.spi.IdempotentRepository;
+import static org.apache.camel.spring.processor.SpringTestHelper.createSpringCamelContext;
+
+public class FileConsumerIdempotentTest extends ContextTestSupport {
+
+ private IdempotentRepository repo;
+
+ protected CamelContext createCamelContext() throws Exception {
+ return createSpringCamelContext(this, "org/apache/camel/spring/processor/idempotent/fileConsumerIdempotentTest.xml");
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ deleteDirectory("target/fileidempotent");
+
+ super.setUp();
+ repo = context.getRegistry().lookup("fileStore", IdempotentRepository.class);
+ }
+
+
+ public void testIdempotent() throws Exception {
+ assertFalse(repo.contains("report.txt"));
+
+ // send a file
+ template.sendBodyAndHeader("file://target/fileidempotent/", "Hello World", FileComponent.HEADER_FILE_NAME, "report.txt");
+
+ // consume the file the first time
+ MockEndpoint mock = getMockEndpoint("mock:result");
+ mock.expectedMessageCount(1);
+
+ assertMockEndpointsSatisfied();
+
+ // reset mock and set new expectations
+ mock.reset();
+ mock.expectedMessageCount(0);
+
+ // move file back
+ File file = new File("target/fileidempotent/done/report.txt");
+ File renamed = new File("target/fileidempotent/report.txt");
+ file = file.getAbsoluteFile();
+ file.renameTo(renamed.getAbsoluteFile());
+
+ // sleep to let the consumer try to poll the file
+ Thread.sleep(2000);
+
+ // should NOT consume the file again, let 2 secs pass to let the consumer try to consume it but it should not
+ assertMockEndpointsSatisfied();
+
+ assertTrue(repo.contains("report.txt"));
+ }
+
+}
+
diff --git a/components/camel-spring/src/test/resources/org/apache/camel/spring/processor/idempotent/fileConsumerIdempotentTest.xml b/components/camel-spring/src/test/resources/org/apache/camel/spring/processor/idempotent/fileConsumerIdempotentTest.xml
new file mode 100644
index 0000000000000..8714d14db6bd3
--- /dev/null
+++ b/components/camel-spring/src/test/resources/org/apache/camel/spring/processor/idempotent/fileConsumerIdempotentTest.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="
+ http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd
+ http://activemq.apache.org/camel/schema/spring http://activemq.apache.org/camel/schema/spring/camel-spring.xsd
+ ">
+
+ <!-- START SNIPPET: example -->
+ <!-- this is our file based idempotent store configured to use the .filestore.dat as file -->
+ <bean id="fileStore" class="org.apache.camel.processor.idempotent.FileIdempotentRepository">
+ <!-- the filename for the store -->
+ <property name="fileStore" value="target/fileidempotent/.filestore.dat"/>
+ <!-- the max filesize in bytes for the file. Camel will trunk and flush the cache
+ if the file gets bigger -->
+ <property name="maxFileStoreSize" value="512000"/>
+ <!-- the number of elements in our store -->
+ <property name="cacheSize" value="250"/>
+ </bean>
+
+ <camelContext id="camel" xmlns="http://activemq.apache.org/camel/schema/spring">
+ <route>
+ <from uri="file://target/fileidempotent/?idempotent=true&idempotentRepositoryRef=fileStore&moveNamePrefix=done/"/>
+ <to uri="mock:result"/>
+ </route>
+ </camelContext>
+ <!-- END SNIPPET: example -->
+</beans>
|
2d6a57d5727d4db527caffd7827d60bb2e303846
|
camel
|
CAMEL-3014 camel-josql should have SQL language- META-INF which can be found automatically by camel-core--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@980675 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/camel
|
diff --git a/components/camel-josql/pom.xml b/components/camel-josql/pom.xml
index a7eed13eef4b4..72ddef0a64ca6 100644
--- a/components/camel-josql/pom.xml
+++ b/components/camel-josql/pom.xml
@@ -64,7 +64,13 @@
<dependency>
<groupId>commons-logging</groupId>
- <artifactId>commons-logging-api</artifactId>
+ <artifactId>commons-logging</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <scope>test</scope>
</dependency>
<dependency>
diff --git a/components/camel-josql/src/main/java/org/apache/camel/language/sql/SqlLanguage.java b/components/camel-josql/src/main/java/org/apache/camel/language/sql/SqlLanguage.java
new file mode 100644
index 0000000000000..99d458286d71f
--- /dev/null
+++ b/components/camel-josql/src/main/java/org/apache/camel/language/sql/SqlLanguage.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.language.sql;
+
+import javax.xml.namespace.QName;
+
+import org.apache.camel.Expression;
+import org.apache.camel.IsSingleton;
+import org.apache.camel.Predicate;
+import org.apache.camel.RuntimeCamelException;
+import org.apache.camel.builder.sql.SqlBuilder;
+import org.apache.camel.builder.xml.XPathBuilder;
+import org.apache.camel.spi.Language;
+import org.josql.QueryParseException;
+
+/**
+ * XPath language.
+ *
+ * @version $Revision$
+ */
+public class SqlLanguage implements Language, IsSingleton {
+
+ public Predicate createPredicate(String expression) {
+ try {
+ SqlBuilder builder = SqlBuilder.sql(expression);
+ return builder;
+ } catch (QueryParseException e) {
+ RuntimeException exception = new RuntimeCamelException("Canont create the SqlBuilder.", e);
+ throw exception;
+ }
+
+ }
+
+ public Expression createExpression(String expression) {
+ try {
+ SqlBuilder builder = SqlBuilder.sql(expression);
+ return builder;
+ } catch (QueryParseException e) {
+ RuntimeException exception = new RuntimeCamelException("Canont create the SqlBuilder.", e);
+ throw exception;
+ }
+ }
+
+ public boolean isSingleton() {
+ return false;
+ }
+}
diff --git a/components/camel-josql/src/main/resources/META-INF/services/org/apache/camel/language/sql b/components/camel-josql/src/main/resources/META-INF/services/org/apache/camel/language/sql
new file mode 100644
index 0000000000000..dce8f091373f0
--- /dev/null
+++ b/components/camel-josql/src/main/resources/META-INF/services/org/apache/camel/language/sql
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+class=org.apache.camel.language.sql.SqlLanguage
diff --git a/components/camel-josql/src/test/java/org/apache/camel/builder/sql/SqlTest.java b/components/camel-josql/src/test/java/org/apache/camel/builder/sql/SqlTest.java
index 96a6785b7004f..ef9bc1927c30b 100644
--- a/components/camel-josql/src/test/java/org/apache/camel/builder/sql/SqlTest.java
+++ b/components/camel-josql/src/test/java/org/apache/camel/builder/sql/SqlTest.java
@@ -24,6 +24,7 @@
import org.apache.camel.Message;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.impl.DefaultExchange;
+import org.apache.camel.test.junit4.CamelTestSupport;
import org.apache.camel.test.junit4.TestSupport;
import org.junit.Before;
import org.junit.Test;
@@ -33,9 +34,9 @@
/**
* @version $Revision$
*/
-public class SqlTest extends TestSupport {
+public class SqlTest extends CamelTestSupport {
- protected CamelContext context = new DefaultCamelContext();
+ //protected CamelContext context = new DefaultCamelContext();
protected Exchange exchange;
@Test
@@ -79,7 +80,8 @@ public void testPredicateWithHeaderVariable() throws Exception {
}
@Before
- public void setUp() throws Exception {
+ public void setUp() throws Exception {
+ super.setUp();
exchange = createExchange();
}
diff --git a/components/camel-josql/src/test/java/org/apache/camel/lanaguage/sql/SqlLanguageTest.java b/components/camel-josql/src/test/java/org/apache/camel/lanaguage/sql/SqlLanguageTest.java
new file mode 100644
index 0000000000000..3058dd1c68d0d
--- /dev/null
+++ b/components/camel-josql/src/test/java/org/apache/camel/lanaguage/sql/SqlLanguageTest.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.lanaguage.sql;
+
+import java.util.List;
+
+import org.apache.camel.Expression;
+import org.apache.camel.builder.sql.Person;
+import org.apache.camel.builder.sql.SqlTest;
+import org.apache.camel.spi.Language;
+import org.junit.Test;
+
+/**
+ * @version $Revision$
+ */
+public class SqlLanguageTest extends SqlTest {
+
+ @Test
+ public void testExpression() throws Exception {
+ Language language = assertResolveLanguage(getLanguageName());
+
+ Expression expression = language.createExpression("SELECT * FROM org.apache.camel.builder.sql.Person where city = 'London'");
+ List value = expression.evaluate(exchange, List.class);
+
+ List list = (List)value;
+ assertEquals("List size", 2, list.size());
+
+ for (Object person : list) {
+ log.info("Found: " + person);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ @Test
+ public void testExpressionWithHeaderVariable() throws Exception {
+ Language language = assertResolveLanguage(getLanguageName());
+
+ Expression expression = language.createExpression("SELECT * FROM org.apache.camel.builder.sql.Person where name = :fooHeader");
+ List value = expression.evaluate(exchange, List.class);
+
+ List<Person> list = (List<Person>)value;
+ assertEquals("List size", 1, list.size());
+
+ for (Person person : list) {
+ log.info("Found: " + person);
+
+ assertEquals("name", "James", person.getName());
+ }
+ }
+
+ @Test
+ public void testPredicates() throws Exception {
+ Language language = assertResolveLanguage(getLanguageName());
+ assertPredicate(language.createPredicate("SELECT * FROM org.apache.camel.builder.sql.Person where city = 'London'"), exchange, true);
+ assertPredicate(language.createPredicate("SELECT * FROM org.apache.camel.builder.sql.Person where city = 'Manchester'"), exchange, false);
+ }
+
+ @Test
+ public void testPredicateWithHeaderVariable() throws Exception {
+ Language language = assertResolveLanguage(getLanguageName());
+ assertPredicate(language.createPredicate("SELECT * FROM org.apache.camel.builder.sql.Person where name = :fooHeader"), exchange, true);
+ }
+
+ protected String getLanguageName() {
+ return "sql";
+ }
+
+}
|
a77138a065b1b594fdb7351f9503be4496995b97
|
intellij-community
|
fixed django template commenter again (PY-1949)--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/lang-impl/src/com/intellij/codeInsight/generation/CommentByLineCommentHandler.java b/platform/lang-impl/src/com/intellij/codeInsight/generation/CommentByLineCommentHandler.java
index ed3c2b59d9ac9..c3c8731430c30 100644
--- a/platform/lang-impl/src/com/intellij/codeInsight/generation/CommentByLineCommentHandler.java
+++ b/platform/lang-impl/src/com/intellij/codeInsight/generation/CommentByLineCommentHandler.java
@@ -488,7 +488,6 @@ private void uncommentLine(int line) {
int start = startOffset + lineText.indexOf(suffix);
myDocument.deleteString(start, start + suffix.length());
}
-
}
boolean skipNewLine = false;
@@ -554,9 +553,10 @@ private void commentLine(int line, int offset, @Nullable Commenter commenter) {
endOffset = CharArrayUtil.shiftBackward(myDocument.getCharsSequence(), endOffset, " \t");
int shiftedStartOffset = CharArrayUtil.shiftForward(myDocument.getCharsSequence(), offset, " \t");
String lineSuffix = ((CommenterWithLineSuffix)commenter).getLineCommentSuffix();
- if (!CharArrayUtil.regionMatches(myDocument.getCharsSequence(), endOffset - lineSuffix.length(), lineSuffix) &&
- !CharArrayUtil.regionMatches(myDocument.getCharsSequence(), shiftedStartOffset, prefix)) {
- myDocument.insertString(endOffset, lineSuffix);
+ if (!CharArrayUtil.regionMatches(myDocument.getCharsSequence(), shiftedStartOffset, prefix)) {
+ if (!CharArrayUtil.regionMatches(myDocument.getCharsSequence(), endOffset - lineSuffix.length(), lineSuffix)) {
+ myDocument.insertString(endOffset, lineSuffix);
+ }
myDocument.insertString(offset, prefix);
}
}
|
887139648d2e693bb50f286810231150bf1fba9f
|
drools
|
BZ-986000 - DRL-to-RuleModel marshalling- improvements--
|
p
|
https://github.com/kiegroup/drools
|
diff --git a/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java b/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java
index 069fea6cbb3..bfb2af28075 100644
--- a/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java
+++ b/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java
@@ -740,8 +740,10 @@ public void generateSeparator( FieldConstraint constr,
if ( !gctx.isHasOutput() ) {
return;
}
- if ( gctx.getDepth() == 0 ) {
- buf.append( ", " );
+ if ( gctx.getDepth() == 0 ) {
+ if (buf.length() > 2 && !(buf.charAt(buf.length() - 2) == ',')) {
+ buf.append(", ");
+ }
} else {
CompositeFieldConstraint cconstr = (CompositeFieldConstraint) gctx.getParent().getFieldConstraint();
buf.append( cconstr.getCompositeJunctionType() + " " );
@@ -800,18 +802,18 @@ private void generateSingleFieldConstraint( final SingleFieldConstraint constr,
assertConstraintValue( constr );
if ( isConstraintComplete( constr ) ) {
- SingleFieldConstraint parent = (SingleFieldConstraint) constr.getParent();
- StringBuilder parentBuf = new StringBuilder();
- while ( parent != null ) {
- String fieldName = parent.getFieldName();
- parentBuf.insert( 0,
- fieldName + "." );
- parent = (SingleFieldConstraint) parent.getParent();
- }
- buf.append( parentBuf );
if ( constr instanceof SingleFieldConstraintEBLeftSide ) {
buf.append( ( (SingleFieldConstraintEBLeftSide) constr ).getExpressionLeftSide().getText() );
} else {
+ SingleFieldConstraint parent = (SingleFieldConstraint) constr.getParent();
+ StringBuilder parentBuf = new StringBuilder();
+ while ( parent != null ) {
+ String fieldName = parent.getFieldName();
+ parentBuf.insert( 0,
+ fieldName + "." );
+ parent = (SingleFieldConstraint) parent.getParent();
+ }
+ buf.append( parentBuf );
String fieldName = constr.getFieldName();
buf.append( fieldName );
}
diff --git a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java
index a2b7bfaa608..71e2ede3d3f 100644
--- a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java
+++ b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java
@@ -275,7 +275,6 @@ public void testSumAsGivenValue() {
}
@Test
- @Ignore
public void testNotNull() {
String expected = "" +
"rule \"my rule\" \n" +
@@ -284,6 +283,37 @@ public void testNotNull() {
" Customer( contact != null , contact.tel1 > 15 )\n" +
" then\n" +
"end\n";
+
+ PackageDataModelOracle dmo = mock(PackageDataModelOracle.class);
+ when(
+ dmo.getProjectModelFields()
+ ).thenReturn(
+ new HashMap<String, ModelField[]>() {{
+ put("Customer",
+ new ModelField[]{
+ new ModelField(
+ "contact",
+ "Contact",
+ ModelField.FIELD_CLASS_TYPE.TYPE_DECLARATION_CLASS,
+ ModelField.FIELD_ORIGIN.DECLARED,
+ FieldAccessorsAndMutators.BOTH,
+ "Contact"
+ )
+ });
+ put("Contact",
+ new ModelField[]{
+ new ModelField(
+ "tel1",
+ "Integer",
+ ModelField.FIELD_CLASS_TYPE.TYPE_DECLARATION_CLASS,
+ ModelField.FIELD_ORIGIN.DECLARED,
+ FieldAccessorsAndMutators.BOTH,
+ "Integer"
+ )
+ });
+ }}
+ );
+
final RuleModel m = new RuleModel();
FactPattern factPattern = new FactPattern();
@@ -304,7 +334,7 @@ public void testNotNull() {
m.name = "my rule";
- checkMarshallUnmarshall(expected, m);
+ checkMarshallUnmarshall(expected, m, dmo);
}
@Test
|
85407ae04f15917e2ff48f93929cc6b7e88c9c23
|
drools
|
[DROOLS-740] fix jitting of constraint with strings- concatenation--
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/Misc2Test.java b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/Misc2Test.java
index e2b1b4bc186..e508a1a71f6 100644
--- a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/Misc2Test.java
+++ b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/Misc2Test.java
@@ -7221,4 +7221,23 @@ public void testCompilationFailureOnNonExistingVariable() {
assertDrlHasCompilationError(drl1, 1);
}
+
+ @Test
+ public void testJittedConstraintStringAndLong() {
+ // DROOLS-740
+ String drl =
+ " import org.drools.compiler.Person; " +
+ " rule 'hello person' " +
+ " when " +
+ " Person( name == \"Elizabeth\" + new Long(2L) ) " +
+ " then " +
+ " end " +
+ "\n";
+ KieSession ksession = new KieHelper().addContent(drl, ResourceType.DRL)
+ .build()
+ .newKieSession();
+
+ ksession.insert(new org.drools.compiler.Person("Elizabeth2", 88));
+ assertEquals(1, ksession.fireAllRules());
+ }
}
\ No newline at end of file
diff --git a/drools-core/src/main/java/org/drools/core/rule/constraint/ASMConditionEvaluatorJitter.java b/drools-core/src/main/java/org/drools/core/rule/constraint/ASMConditionEvaluatorJitter.java
index 2808af69aa3..48ab8a63163 100644
--- a/drools-core/src/main/java/org/drools/core/rule/constraint/ASMConditionEvaluatorJitter.java
+++ b/drools-core/src/main/java/org/drools/core/rule/constraint/ASMConditionEvaluatorJitter.java
@@ -675,12 +675,16 @@ private Class<?> jitAritmeticExpression(AritmeticExpression aritmeticExpression)
private void jitStringConcat(Expression left, Expression right) {
invokeConstructor(StringBuilder.class);
jitExpression(left, String.class);
- invokeVirtual(StringBuilder.class, "append", StringBuilder.class, left.getType());
+ invokeVirtual(StringBuilder.class, "append", StringBuilder.class, getTypeForAppend(left.getType()));
jitExpression(right, String.class);
- invokeVirtual(StringBuilder.class, "append", StringBuilder.class, right.getType());
+ invokeVirtual(StringBuilder.class, "append", StringBuilder.class, getTypeForAppend(right.getType()));
invokeVirtual(StringBuilder.class, "toString", String.class);
}
+ private Class<?> getTypeForAppend(Class<?> c) {
+ return c.isPrimitive() ? c : Object.class;
+ }
+
private void jitExpressionToPrimitiveType(Expression expression, Class<?> primitiveType) {
jitExpression(expression, primitiveType);
if (!isFixed(expression)) {
|
28c3b04e75aaf2f1b33013cecebd38ae0d6a0b88
|
restlet-framework-java
|
Introducing a new Directory constructor with a- LocalReference instance.--
|
a
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/module/org.restlet.test/src/org/restlet/test/DirectoryTestCase.java b/module/org.restlet.test/src/org/restlet/test/DirectoryTestCase.java
index 3d664146a8..af7ac086b5 100644
--- a/module/org.restlet.test/src/org/restlet/test/DirectoryTestCase.java
+++ b/module/org.restlet.test/src/org/restlet/test/DirectoryTestCase.java
@@ -27,6 +27,7 @@
import org.restlet.Component;
import org.restlet.Directory;
import org.restlet.Restlet;
+import org.restlet.data.LocalReference;
import org.restlet.data.Method;
import org.restlet.data.Protocol;
import org.restlet.data.Request;
@@ -377,12 +378,11 @@ public MyApplication(Component component) {
* @param component
* The component.
*/
- public MyApplication(Component component, File testDirectory) {
+ public MyApplication(Component component, File testDirectory) throws IOException {
super(component);
this.setTestDirectory(testDirectory);
// Create a DirectoryHandler that manages a local Directory
- this.directory = new Directory(getContext(), getTestDirectory()
- .toURI().toString());
+ this.directory = new Directory(getContext(), LocalReference.createFileReference(getTestDirectory()));
this.directory.setNegotiateContent(true);
}
|
c46153c7e058b212d97698f5b6f06733d669d46f
|
ReactiveX-RxJava
|
As per suggestions: Added single static instance of- ExecutorService for delayed posting Introduced ScheduledIOSAction to enable- CompositeSubscription--
|
a
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-contrib/rxjava-ios/src/main/java/rx/ios/schedulers/HandlerThreadScheduler.java b/rxjava-contrib/rxjava-ios/src/main/java/rx/ios/schedulers/HandlerThreadScheduler.java
index a57aa8b3e5..151f5bef41 100644
--- a/rxjava-contrib/rxjava-ios/src/main/java/rx/ios/schedulers/HandlerThreadScheduler.java
+++ b/rxjava-contrib/rxjava-ios/src/main/java/rx/ios/schedulers/HandlerThreadScheduler.java
@@ -1,4 +1,3 @@
-package rx.ios.schedulers;
/**
* Copyright 2013 Netflix, Inc.
* Copyright 2014 Ashley Williams
@@ -16,16 +15,18 @@
* limitations under the License.
*/
+package rx.ios.schedulers;
-import org.robovm.apple.foundation.NSBlockOperation;
import org.robovm.apple.foundation.NSOperationQueue;
import rx.Scheduler;
import rx.Subscription;
import rx.functions.Action0;
-import rx.subscriptions.BooleanSubscription;
+import rx.internal.util.RxThreadFactory;
+import rx.subscriptions.CompositeSubscription;
import rx.subscriptions.Subscriptions;
import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
@@ -35,6 +36,8 @@
public class HandlerThreadScheduler extends Scheduler {
private final NSOperationQueue operationQueue;
+ private static final String THREAD_PREFIX = "RxiOSScheduledExecutorPool-";
+
public HandlerThreadScheduler(NSOperationQueue operationQueue) {
this.operationQueue = operationQueue;
@@ -49,7 +52,7 @@ public Worker createWorker() {
private static class InnerHandlerThreadScheduler extends Worker {
private final NSOperationQueue operationQueue;
- private BooleanSubscription innerSubscription = new BooleanSubscription();
+ private CompositeSubscription innerSubscription = new CompositeSubscription();
public InnerHandlerThreadScheduler(NSOperationQueue operationQueue) {
@@ -67,43 +70,53 @@ public boolean isUnsubscribed() {
}
@Override
- public Subscription schedule(Action0 action0) {
- return schedule(action0, 0, TimeUnit.MILLISECONDS);
+ public Subscription schedule(final Action0 action) {
+ return schedule(action, 0, null);
}
@Override
public Subscription schedule(final Action0 action, long delayTime, TimeUnit unit) {
+ return scheduledAction(action, delayTime, unit);
+ }
+
+ public Subscription scheduledAction(final Action0 action, long delay, TimeUnit unit) {
- ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
- final NSBlockOperation runOperation = new NSBlockOperation();
-
- executor.schedule(new Runnable() {
- @Override
- public void run() {
- if (isUnsubscribed()) {
- return;
- }
- /* Runnable for action */
- final Runnable actionRunner = new Runnable() {
- @Override
- public void run() {
- action.call();
- }
- };
-
- runOperation.addExecutionBlock$(actionRunner);
-
- /* Add operation to operation queue*/
- operationQueue.addOperation(runOperation);
- }
- }, delayTime, unit);
-
- return Subscriptions.create(new Action0() {
- @Override
- public void call() {
- runOperation.cancel();
- }
- });
+ if (innerSubscription.isUnsubscribed()) {
+ return Subscriptions.empty();
+ }
+
+ final ScheduledIOSAction scheduledAction = new ScheduledIOSAction(action, operationQueue);
+ final ScheduledExecutorService executor = IOSScheduledExecutorPool.getInstance();
+
+ Future<?> future;
+ if (delay <= 0) {
+ future = executor.submit(scheduledAction);
+ } else {
+ future = executor.schedule(scheduledAction, delay, unit);
+ }
+
+ scheduledAction.add(Subscriptions.from(future));
+ scheduledAction.addParent(innerSubscription);
+
+ return scheduledAction;
}
}
+
+
+ private static final class IOSScheduledExecutorPool {
+
+ private static final RxThreadFactory THREAD_FACTORY = new RxThreadFactory(THREAD_PREFIX);
+
+ private static IOSScheduledExecutorPool INSTANCE = new IOSScheduledExecutorPool();
+ private final ScheduledExecutorService executorService;
+
+ private IOSScheduledExecutorPool() {
+ executorService = Executors.newScheduledThreadPool(1, THREAD_FACTORY);
+ }
+
+ public static ScheduledExecutorService getInstance() {
+ return INSTANCE.executorService;
+ }
+ }
+
}
diff --git a/rxjava-contrib/rxjava-ios/src/main/java/rx/ios/schedulers/ScheduledIOSAction.java b/rxjava-contrib/rxjava-ios/src/main/java/rx/ios/schedulers/ScheduledIOSAction.java
new file mode 100644
index 0000000000..c7e2bfd476
--- /dev/null
+++ b/rxjava-contrib/rxjava-ios/src/main/java/rx/ios/schedulers/ScheduledIOSAction.java
@@ -0,0 +1,130 @@
+/**
+ * Copyright 2014 Netflix, Inc.
+ * Copyright 2014 Ashley Williams
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package rx.ios.schedulers;
+
+import org.robovm.apple.foundation.NSBlockOperation;
+import org.robovm.apple.foundation.NSOperationQueue;
+import rx.Subscription;
+import rx.functions.Action0;
+import rx.subscriptions.CompositeSubscription;
+
+import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
+
+/**
+ * Based on {@code ScheduledAction} - A {@code Runnable} that executes an {@code Action0}
+ * that can be cancelled.
+ */
+final class ScheduledIOSAction implements Runnable, Subscription {
+ final CompositeSubscription cancel;
+ final Action0 action;
+ NSBlockOperation nsBlockOperation;
+ final NSOperationQueue operationQueue;
+ volatile int once;
+ static final AtomicIntegerFieldUpdater<ScheduledIOSAction> ONCE_UPDATER
+ = AtomicIntegerFieldUpdater.newUpdater(ScheduledIOSAction.class, "once");
+
+ public ScheduledIOSAction(Action0 action, NSOperationQueue operationQueue) {
+ this.action = action;
+ this.operationQueue = operationQueue;
+ this.cancel = new CompositeSubscription();
+
+ nsBlockOperation = new NSBlockOperation();
+ }
+
+ @Override
+ public void run() {
+ try {
+
+ final Runnable actionRunner = new Runnable() {
+ @Override
+ public void run() {
+ action.call();
+ }
+ };
+
+ nsBlockOperation.addExecutionBlock$(actionRunner);
+
+ /* Add operation to operation queue*/
+ operationQueue.addOperation(nsBlockOperation);
+
+ } finally {
+ unsubscribe();
+ }
+ }
+
+ @Override
+ public boolean isUnsubscribed() {
+ return cancel.isUnsubscribed();
+ }
+
+ @Override
+ public void unsubscribe() {
+ if (ONCE_UPDATER.compareAndSet(this, 0, 1)) {
+ nsBlockOperation.cancel();
+ cancel.unsubscribe();
+ System.err.println("cancelled");
+ }
+ }
+
+ /**
+ * Adds a {@code Subscription} to the {@link CompositeSubscription} to be later cancelled on unsubscribe
+ *
+ * @param s subscription to add
+ */
+ public void add(Subscription s) {
+ cancel.add(s);
+ }
+
+ /**
+ * Adds a parent {@link rx.subscriptions.CompositeSubscription} to this {@code ScheduledIOSAction} so when
+ * the action is cancelled or terminates, it can remove itself from this parent
+ * @param parent the parent {@code CompositeSubscription} to add
+ */
+ public void addParent(CompositeSubscription parent) {
+ cancel.add(new Remover(this, parent));
+ }
+
+
+ /**
+ * Remove a child subscription from a composite when unsubscribing
+ */
+ private static final class Remover implements Subscription {
+ final Subscription s;
+ final CompositeSubscription parent;
+ volatile int once;
+ static final AtomicIntegerFieldUpdater<Remover> ONCE_UPDATER
+ = AtomicIntegerFieldUpdater.newUpdater(Remover.class, "once");
+
+ public Remover(Subscription s, CompositeSubscription parent) {
+ this.s = s;
+ this.parent = parent;
+ }
+
+ @Override
+ public boolean isUnsubscribed() {
+ return s.isUnsubscribed();
+ }
+
+ @Override
+ public void unsubscribe() {
+ if (ONCE_UPDATER.compareAndSet(this, 0, 1)) {
+ parent.remove(s);
+ }
+ }
+ }
+}
\ No newline at end of file
|
058d4d4b5ffe40b8e93c7593f0b5346373455480
|
camel
|
CAMEL-3788 Merged the patch into camel-http4--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1083724 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/camel
|
diff --git a/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpProducer.java b/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpProducer.java
index cf24b151c985a..8de7b8d7aca64 100644
--- a/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpProducer.java
+++ b/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpProducer.java
@@ -127,11 +127,13 @@ public HttpEndpoint getEndpoint() {
protected void populateResponse(Exchange exchange, HttpRequestBase httpRequest, HttpResponse httpResponse,
Message in, HeaderFilterStrategy strategy, int responseCode) throws IOException, ClassNotFoundException {
+ // We just make the out message is not create when extractResponseBody throws exception
+ Object response = extractResponseBody(httpRequest, httpResponse, exchange);
Message answer = exchange.getOut();
answer.setHeaders(in.getHeaders());
answer.setHeader(Exchange.HTTP_RESPONSE_CODE, responseCode);
- answer.setBody(extractResponseBody(httpRequest, httpResponse, exchange));
+ answer.setBody(response);
// propagate HTTP response headers
Header[] headers = httpResponse.getAllHeaders();
|
3fdd2f2b7464a310674bb62f58dcbff90d1ad16a
|
restlet-framework-java
|
- Fixed bugs--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/source/main/com/noelios/restlet/build/ChainletBuilder.java b/source/main/com/noelios/restlet/build/ChainletBuilder.java
index ed7fe22341..e5ea77d6c9 100644
--- a/source/main/com/noelios/restlet/build/ChainletBuilder.java
+++ b/source/main/com/noelios/restlet/build/ChainletBuilder.java
@@ -62,7 +62,7 @@ public ChainletBuilder(DefaultBuilder parent, Chainlet node)
*/
public Chainlet getNode()
{
- return (Chainlet)getNode();
+ return (Chainlet)super.getNode();
}
/**
diff --git a/source/main/com/noelios/restlet/build/ComponentBuilder.java b/source/main/com/noelios/restlet/build/ComponentBuilder.java
index 5135b31ba9..80d42b3092 100644
--- a/source/main/com/noelios/restlet/build/ComponentBuilder.java
+++ b/source/main/com/noelios/restlet/build/ComponentBuilder.java
@@ -51,7 +51,7 @@ public ComponentBuilder(DefaultBuilder parent, Component node)
*/
public Component getNode()
{
- return (Component)getNode();
+ return (Component)super.getNode();
}
/**
diff --git a/source/main/com/noelios/restlet/build/ExtractChainletBuilder.java b/source/main/com/noelios/restlet/build/ExtractChainletBuilder.java
index 40d40eac8b..22945cda33 100644
--- a/source/main/com/noelios/restlet/build/ExtractChainletBuilder.java
+++ b/source/main/com/noelios/restlet/build/ExtractChainletBuilder.java
@@ -46,7 +46,7 @@ public ExtractChainletBuilder(DefaultBuilder parent, ExtractChainlet node)
*/
public ExtractChainlet getNode()
{
- return (ExtractChainlet)getNode();
+ return (ExtractChainlet)super.getNode();
}
/**
diff --git a/source/main/com/noelios/restlet/build/GuardChainletBuilder.java b/source/main/com/noelios/restlet/build/GuardChainletBuilder.java
index 420e4a20cd..1dd5e1c17b 100644
--- a/source/main/com/noelios/restlet/build/GuardChainletBuilder.java
+++ b/source/main/com/noelios/restlet/build/GuardChainletBuilder.java
@@ -46,7 +46,7 @@ public GuardChainletBuilder(DefaultBuilder parent, GuardChainlet node)
*/
public GuardChainlet getNode()
{
- return (GuardChainlet)getNode();
+ return (GuardChainlet)super.getNode();
}
/**
|
8c475876dda2507977fd7282c37462136400daf2
|
drools
|
-Added fixes for waltz to run waltz50--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@7071 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Edge.java b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Edge.java
index d114ffd5441..b1ec6212171 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Edge.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Edge.java
@@ -39,6 +39,10 @@ public class Edge {
final public static String MINUS = "-";
+ public Edge() {
+
+ }
+
public Edge(final int p1,
final int p2,
final boolean joined,
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Junction.java b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Junction.java
index dc50219454d..ca1e1b51585 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Junction.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Junction.java
@@ -39,7 +39,11 @@ public class Junction {
private int basePoint;
private String type;
-
+
+ public Junction() {
+
+ }
+
public Junction(final int p1,
final int p2,
final int p3,
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Line.java b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Line.java
index 0c424db3448..158900158fd 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Line.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Line.java
@@ -26,6 +26,10 @@ public class Line {
private int p2;
+ public Line() {
+
+ }
+
public Line(final int p1,
final int p2) {
this.p1 = p1;
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Stage.java b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Stage.java
index 252ba075d3a..f5c0ba87174 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Stage.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Stage.java
@@ -51,6 +51,10 @@ public class Stage
private int value;
+ public Stage() {
+
+ }
+
public Stage(final int value) {
this.value = value;
}
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Waltz.java b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Waltz.java
index 4cfe78e93dd..2bca544472d 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Waltz.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/waltz/Waltz.java
@@ -56,10 +56,15 @@ public void testWaltz() {
// workingMemory.addEventListener( agendaListener );
//go !
- //this.loadLines( workingMemory, "waltz12.dat" );
-
- final Stage stage = new Stage( Stage.START );
- workingMemory.assertObject( stage );
+ this.loadLines( workingMemory,
+ "waltz50.dat" );
+
+ //final Stage stage = new Stage( Stage.START );
+ //workingMemory.assertObject( stage );
+
+ Stage stage = new Stage(Stage.DUPLICATE);
+ workingMemory.assertObject( stage );
+
workingMemory.fireAllRules();
} catch ( final Throwable t ) {
t.printStackTrace();
@@ -100,7 +105,7 @@ private void loadLines(final WorkingMemory wm,
final Matcher m = pat.matcher( line );
if ( m.matches() ) {
final Line l = new Line( Integer.parseInt( m.group( 1 ) ),
- Integer.parseInt( m.group( 2 ) ) );
+ Integer.parseInt( m.group( 2 ) ) );
wm.assertObject( l );
}
line = reader.readLine();
|
d703ec89cf73fa94cbd8993b05c5f46ec7bd84ff
|
orientdb
|
Issue -1607 WAL page change tracking was- reimplemented.--
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/OPageChanges.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/OPageChanges.java
index a296a502646..0f4a65b7cf1 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/OPageChanges.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/OPageChanges.java
@@ -1,368 +1,163 @@
package com.orientechnologies.orient.core.storage.impl.local.paginated.wal;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.ListIterator;
+
import com.orientechnologies.common.directmemory.ODirectMemory;
import com.orientechnologies.common.directmemory.ODirectMemoryFactory;
import com.orientechnologies.common.serialization.types.OIntegerSerializer;
public class OPageChanges {
- private static final int INITIAL_SIZE = 16;
-
- private final ODirectMemory directMemory = ODirectMemoryFactory.INSTANCE.directMemory();
- private ChangesBucket[] changesBuckets = new ChangesBucket[INITIAL_SIZE];
-
- private int size = 0;
-
- public void addChanges(int pageOffset, byte[] newValues, byte[] oldValues) {
- assert newValues.length == oldValues.length;
-
- if (size == 0) {
- changesBuckets[0] = new ChangesBucket(pageOffset, newValues, oldValues);
- size = 1;
- } else {
- ChangesBucket bucketToUse;
- int bucketIndex;
-
- final int insertionIndex = binarySearch(pageOffset);
- if (insertionIndex >= 0) {
- bucketIndex = insertionIndex;
- bucketToUse = changesBuckets[bucketIndex];
- bucketToUse.updateValues(pageOffset, newValues, oldValues);
- } else {
- bucketIndex = -insertionIndex - 1;
-
- if (bucketIndex < size) {
- final ChangesBucket bucket = changesBuckets[bucketIndex];
- if (bucket.startPosition < pageOffset) {
- bucketToUse = bucket;
- bucketToUse.updateValues(pageOffset, newValues, oldValues);
- } else {
- bucketToUse = new ChangesBucket(pageOffset, newValues, oldValues);
- }
- } else {
- bucketToUse = new ChangesBucket(pageOffset, newValues, oldValues);
- }
- }
-
- int shiftBackFrom = -1;
- int shiftBackTo = -1;
-
- int startIndex;
- if (bucketIndex < size && bucketToUse == changesBuckets[bucketIndex]) {
- startIndex = bucketIndex + 1;
- } else {
- startIndex = bucketIndex;
- }
-
- for (int i = startIndex; i < size; i++) {
- ChangesBucket bucketToMerge = changesBuckets[i];
- if (bucketToUse.endPosition >= bucketToMerge.startPosition) {
- bucketToUse.merge(bucketToMerge);
- if (i == startIndex) {
- shiftBackFrom = startIndex;
- shiftBackTo = startIndex;
- } else
- shiftBackTo = i;
- } else
- break;
- }
-
- if (shiftBackFrom == bucketIndex) {
- shiftBackFrom++;
- changesBuckets[bucketIndex] = bucketToUse;
-
- if (shiftBackFrom <= shiftBackTo)
- collapse(shiftBackFrom, shiftBackTo);
- } else {
- if (shiftBackFrom >= 0)
- collapse(shiftBackFrom, shiftBackTo);
- }
-
- if (bucketIndex >= size || bucketToUse != changesBuckets[bucketIndex])
- insert(bucketIndex, bucketToUse);
- }
- }
+ private final ODirectMemory directMemory = ODirectMemoryFactory.INSTANCE.directMemory();
+ private List<ChangeUnit> changeUnits = new ArrayList<ChangeUnit>();
+ private int serializedSize = OIntegerSerializer.INT_SIZE;
- public boolean isEmpty() {
- return size == 0;
- }
+ public void addChanges(int pageOffset, byte[] newValues, byte[] oldValues) {
+ assert newValues.length == oldValues.length;
+ changeUnits.add(new ChangeUnit(pageOffset, oldValues, newValues));
- public void applyChanges(long pointer) {
- for (int i = 0; i < size; i++) {
- ChangesBucket bucket = changesBuckets[i];
- directMemory.set(pointer + bucket.startPosition, bucket.newValues, 0, bucket.newValues.length);
+ serializedSize += compressedIntegerSize(pageOffset) + compressedIntegerSize(newValues.length) + newValues.length
+ + oldValues.length;
}
- }
- public void revertChanges(long pointer) {
- for (int i = 0; i < size; i++) {
- ChangesBucket bucket = changesBuckets[i];
- directMemory.set(pointer + bucket.startPosition, bucket.oldValues, 0, bucket.oldValues.length);
+ public boolean isEmpty() {
+ return changeUnits.isEmpty();
}
- }
-
- private void insert(int bucketIndex, ChangesBucket bucket) {
- assert bucketIndex <= size;
- if (size < changesBuckets.length) {
- System.arraycopy(changesBuckets, bucketIndex, changesBuckets, bucketIndex + 1, size - bucketIndex);
- changesBuckets[bucketIndex] = bucket;
- } else {
- ChangesBucket[] oldChangesBuckets = changesBuckets;
- changesBuckets = new ChangesBucket[changesBuckets.length << 1];
-
- if (bucketIndex > 0)
- System.arraycopy(oldChangesBuckets, 0, changesBuckets, 0, bucketIndex);
-
- if (bucketIndex < size)
- System.arraycopy(oldChangesBuckets, bucketIndex, changesBuckets, bucketIndex + 1, size - bucketIndex);
-
- changesBuckets[bucketIndex] = bucket;
+ public void applyChanges(long pointer) {
+ for (ChangeUnit changeUnit : changeUnits) {
+ directMemory.set(pointer + changeUnit.pageOffset, changeUnit.newValues, 0, changeUnit.newValues.length);
+ }
}
- size++;
- }
-
- private int binarySearch(int startPosition) {
- int low = 0;
- int high = size - 1;
-
- while (low <= high) {
- int mid = (low + high) >>> 1;
- ChangesBucket midBucket = changesBuckets[mid];
- if (midBucket.endPosition < startPosition)
- low = mid + 1;
- else if (midBucket.endPosition > startPosition)
- high = mid - 1;
- else
- return mid;
+ public void revertChanges(long pointer) {
+ ListIterator<ChangeUnit> iterator = changeUnits.listIterator(changeUnits.size());
+ while (iterator.hasPrevious()) {
+ ChangeUnit changeUnit = iterator.previous();
+ directMemory.set(pointer + changeUnit.pageOffset, changeUnit.oldValues, 0, changeUnit.oldValues.length);
+ }
}
- return -(low + 1);
- }
-
- private void collapse(int shiftBackFrom, int shiftBackTo) {
- assert shiftBackTo >= shiftBackFrom;
- int sizeDiff = shiftBackTo - shiftBackFrom + 1;
- if (shiftBackTo < size - 1) {
- System.arraycopy(changesBuckets, shiftBackTo + 1, changesBuckets, shiftBackFrom, size - (shiftBackTo + 1));
-
- for (int i = size - sizeDiff; i < size; i++)
- changesBuckets[i] = null;
- } else {
- for (int i = shiftBackFrom; i <= shiftBackTo; i++)
- changesBuckets[i] = null;
+ public int serializedSize() {
+ return serializedSize;
}
- size -= sizeDiff;
- }
-
- public int serializedSize() {
- int serializedSize = OIntegerSerializer.INT_SIZE;
+ public int toStream(byte[] content, int offset) {
+ OIntegerSerializer.INSTANCE.serializeNative(changeUnits.size(), content, offset);
+ offset += OIntegerSerializer.INT_SIZE;
- serializedSize += compressedIntegerSize(size);
- for (int i = 0; i < size; i++) {
- ChangesBucket changesBucket = changesBuckets[i];
+ for (ChangeUnit changeUnit : changeUnits) {
+ offset = serializeCompressedInteger(content, offset, changeUnit.pageOffset);
+ offset = serializeCompressedInteger(content, offset, changeUnit.newValues.length);
- serializedSize += compressedIntegerSize(changesBucket.startPosition);
- serializedSize += compressedIntegerSize(changesBucket.newValues.length);
-
- assert changesBucket.newValues.length == changesBucket.oldValues.length;
-
- serializedSize += 2 * changesBucket.newValues.length;
- }
+ System.arraycopy(changeUnit.newValues, 0, content, offset, changeUnit.newValues.length);
+ offset += changeUnit.newValues.length;
- return serializedSize;
- }
-
- public int serializedSize(byte[] content, int offset) {
- return OIntegerSerializer.INSTANCE.deserializeNative(content, offset);
- }
-
- public int toStream(byte[] content, int offset) {
- int initialOffset = offset;
-
- offset += OIntegerSerializer.INT_SIZE;
-
- offset = serializeCompressedInteger(content, offset, size);
- for (int i = 0; i < size; i++) {
- ChangesBucket changesBucket = changesBuckets[i];
- offset = serializeCompressedInteger(content, offset, changesBucket.startPosition);
- offset = serializeCompressedInteger(content, offset, changesBucket.newValues.length);
-
- System.arraycopy(changesBucket.newValues, 0, content, offset, changesBucket.newValues.length);
- offset += changesBucket.newValues.length;
+ System.arraycopy(changeUnit.oldValues, 0, content, offset, changeUnit.oldValues.length);
+ offset += changeUnit.oldValues.length;
+ }
- System.arraycopy(changesBucket.oldValues, 0, content, offset, changesBucket.oldValues.length);
- offset += changesBucket.oldValues.length;
+ return offset;
}
- OIntegerSerializer.INSTANCE.serializeNative(offset - initialOffset, content, initialOffset);
-
- return offset;
- }
+ public int fromStream(byte[] content, int offset) {
+ final int changesSize = OIntegerSerializer.INSTANCE.deserializeNative(content, offset);
+ offset += OIntegerSerializer.INT_SIZE;
- public int fromStream(byte[] content, int offset) {
- offset += OIntegerSerializer.INT_SIZE;
+ changeUnits = new ArrayList<ChangeUnit>(changesSize);
- int[] decompressionResult = deserializeCompressedInteger(content, offset);
+ int[] decompressResult;
+ for (int i = 0; i < changesSize; i++) {
+ decompressResult = deserializeCompressedInteger(content, offset);
- size = decompressionResult[0];
- changesBuckets = new ChangesBucket[size];
+ int pageOffset = decompressResult[0];
+ offset = decompressResult[1];
- offset = decompressionResult[1];
- for (int i = 0; i < size; i++) {
- decompressionResult = deserializeCompressedInteger(content, offset);
-
- int startPosition = decompressionResult[0];
- offset = decompressionResult[1];
-
- decompressionResult = deserializeCompressedInteger(content, offset);
- int changesSize = decompressionResult[0];
- offset = decompressionResult[1];
-
- byte[] newValues = new byte[changesSize];
- byte[] oldValues = new byte[changesSize];
-
- System.arraycopy(content, offset, newValues, 0, changesSize);
- offset += changesSize;
-
- System.arraycopy(content, offset, oldValues, 0, changesSize);
- offset += changesSize;
-
- changesBuckets[i] = new ChangesBucket(startPosition, newValues, oldValues);
- }
+ decompressResult = deserializeCompressedInteger(content, offset);
+ int dataLength = decompressResult[0];
+ offset = decompressResult[1];
- return offset;
- }
+ byte[] newValues = new byte[dataLength];
+ System.arraycopy(content, offset, newValues, 0, dataLength);
+ offset += dataLength;
- private int compressedIntegerSize(int value) {
- if (value <= 127)
- return 1;
- if (value <= 16383)
- return 2;
- if (value <= 2097151)
- return 3;
+ byte[] oldValues = new byte[dataLength];
+ System.arraycopy(content, offset, oldValues, 0, dataLength);
+ offset += dataLength;
- throw new IllegalArgumentException("Values more than 2097151 are not supported.");
- }
+ changeUnits.add(new ChangeUnit(pageOffset, oldValues, newValues));
+ }
- private int serializeCompressedInteger(byte[] content, int offset, int value) {
- if (value <= 127) {
- content[offset] = (byte) value;
- return offset + 1;
+ return offset;
}
- if (value <= 16383) {
- content[offset + 1] = (byte) (0xFF & value);
+ private int compressedIntegerSize(int value) {
+ if (value <= 127)
+ return 1;
+ if (value <= 16383)
+ return 2;
+ if (value <= 2097151)
+ return 3;
- value = value >>> 8;
- content[offset] = (byte) (0x80 | value);
- return offset + 2;
+ throw new IllegalArgumentException("Values more than 2097151 are not supported.");
}
- if (value <= 2097151) {
- content[offset + 2] = (byte) (0xFF & value);
- value = value >>> 8;
-
- content[offset + 1] = (byte) (0xFF & value);
- value = value >>> 8;
-
- content[offset] = (byte) (0xC0 | value);
-
- return offset + 3;
- }
-
- throw new IllegalArgumentException("Values more than 2097151 are not supported.");
- }
-
- private int[] deserializeCompressedInteger(byte[] content, int offset) {
- if ((content[offset] & 0x80) == 0)
- return new int[] { content[offset], offset + 1 };
+ private int serializeCompressedInteger(byte[] content, int offset, int value) {
+ if (value <= 127) {
+ content[offset] = (byte) value;
+ return offset + 1;
+ }
- if ((content[offset] & 0xC0) == 0x80) {
- final int value = (0xFF & content[offset + 1]) | ((content[offset] & 0x3F) << 8);
- return new int[] { value, offset + 2 };
- }
+ if (value <= 16383) {
+ content[offset + 1] = (byte) (0xFF & value);
- if ((content[offset] & 0xE0) == 0xC0) {
- final int value = (0xFF & content[offset + 2]) | ((0xFF & content[offset + 1]) << 8) | ((content[offset] & 0x1F) << 16);
- return new int[] { value, offset + 3 };
- }
+ value = value >>> 8;
+ content[offset] = (byte) (0x80 | value);
+ return offset + 2;
+ }
- throw new IllegalArgumentException("Invalid integer format.");
- }
+ if (value <= 2097151) {
+ content[offset + 2] = (byte) (0xFF & value);
+ value = value >>> 8;
- private static final class ChangesBucket {
- private final int startPosition;
- private int endPosition;
+ content[offset + 1] = (byte) (0xFF & value);
+ value = value >>> 8;
- private byte[] newValues;
- private byte[] oldValues;
+ content[offset] = (byte) (0xC0 | value);
- private ChangesBucket(int startPosition, byte[] newValues, byte[] oldValues) {
- assert newValues.length == oldValues.length;
+ return offset + 3;
+ }
- this.startPosition = startPosition;
- this.endPosition = startPosition + newValues.length;
- this.newValues = newValues;
- this.oldValues = oldValues;
+ throw new IllegalArgumentException("Values more than 2097151 are not supported.");
}
- public void updateValues(int startPosition, byte[] newValues, byte[] oldValues) {
- assert startPosition <= this.endPosition;
- assert startPosition >= this.startPosition;
- assert newValues.length == oldValues.length;
-
- int endPosition = startPosition + newValues.length;
-
- if (endPosition > this.endPosition) {
- int lenDiff = endPosition - this.endPosition;
+ private int[] deserializeCompressedInteger(byte[] content, int offset) {
+ if ((content[offset] & 0x80) == 0)
+ return new int[]{content[offset], offset + 1};
- byte[] oldNewValues = this.newValues;
- byte[] oldOldValues = this.oldValues;
-
- this.newValues = new byte[this.newValues.length + lenDiff];
- System.arraycopy(oldNewValues, 0, this.newValues, 0, oldNewValues.length);
-
- this.oldValues = new byte[this.oldValues.length + lenDiff];
- System.arraycopy(oldOldValues, 0, this.oldValues, 0, oldOldValues.length);
-
- System.arraycopy(oldValues, oldValues.length - lenDiff, this.oldValues, this.oldValues.length - lenDiff, lenDiff);
+ if ((content[offset] & 0xC0) == 0x80) {
+ final int value = (0xFF & content[offset + 1]) | ((content[offset] & 0x3F) << 8);
+ return new int[]{value, offset + 2};
+ }
- this.endPosition = endPosition;
- }
+ if ((content[offset] & 0xE0) == 0xC0) {
+ final int value = (0xFF & content[offset + 2]) | ((0xFF & content[offset + 1]) << 8) | ((content[offset] & 0x1F) << 16);
+ return new int[]{value, offset + 3};
+ }
- final int dataOffset = startPosition - this.startPosition;
- System.arraycopy(newValues, 0, this.newValues, dataOffset, newValues.length);
+ throw new IllegalArgumentException("Invalid integer format.");
}
- public void merge(ChangesBucket bucketToMerge) {
- assert bucketToMerge.startPosition <= endPosition;
- assert bucketToMerge.startPosition >= startPosition;
-
- if (endPosition < bucketToMerge.endPosition) {
- int newValuesDiff = bucketToMerge.endPosition - this.endPosition;
-
- byte[] oldNewValues = this.newValues;
- byte[] oldOldValues = this.oldValues;
-
- this.newValues = new byte[this.newValues.length + newValuesDiff];
- System.arraycopy(oldNewValues, 0, this.newValues, 0, oldNewValues.length);
-
- this.oldValues = new byte[this.oldValues.length + newValuesDiff];
- System.arraycopy(oldOldValues, 0, this.oldValues, 0, oldOldValues.length);
-
- System.arraycopy(bucketToMerge.newValues, bucketToMerge.newValues.length - newValuesDiff, this.newValues,
- this.newValues.length - newValuesDiff, newValuesDiff);
-
- this.endPosition = bucketToMerge.endPosition;
- }
-
- int oldValuesFrom = bucketToMerge.startPosition - this.startPosition;
-
- assert oldValuesFrom + bucketToMerge.oldValues.length <= this.oldValues.length;
- System.arraycopy(bucketToMerge.oldValues, 0, this.oldValues, oldValuesFrom, bucketToMerge.oldValues.length);
+ private final static class ChangeUnit {
+ private final int pageOffset;
+ private final byte[] oldValues;
+ private final byte[] newValues;
+ private ChangeUnit(int pageOffset, byte[] oldValues, byte[] newValues) {
+ this.pageOffset = pageOffset;
+ this.oldValues = oldValues;
+ this.newValues = newValues;
+ }
}
- }
}
|
d5c271acf51b504f2316c4a18597dbe81a67c6a9
|
elasticsearch
|
clean-up long values--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/index/fielddata/DoubleValues.java b/src/main/java/org/elasticsearch/index/fielddata/DoubleValues.java
index d6ab9e6d2e3dc..5382220525179 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/DoubleValues.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/DoubleValues.java
@@ -20,6 +20,7 @@
package org.elasticsearch.index.fielddata;
import org.elasticsearch.ElasticSearchIllegalStateException;
+import org.elasticsearch.index.fielddata.LongValues.Iter;
import org.elasticsearch.index.fielddata.util.DoubleArrayRef;
import org.elasticsearch.index.fielddata.util.IntArrayRef;
import org.elasticsearch.index.fielddata.util.LongArrayRef;
@@ -136,7 +137,6 @@ public static class LongBased implements DoubleValues {
private final LongValues values;
private final ValueIter iter = new ValueIter();
- private final Proc proc = new Proc();
public LongBased(LongValues values) {
this.values = values;
@@ -172,7 +172,14 @@ public Iter getIter(int docId) {
@Override
public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- values.forEachValueInDoc(docId, this.proc.reset(proc));
+ if (values.hasValue(docId)) {
+ final LongValues.Iter longIter = values.getIter(docId);
+ while(longIter.hasNext()) {
+ proc.onValue(docId, longIter.next());
+ }
+ } else {
+ proc.onMissing(docId);
+ }
}
static class ValueIter implements Iter {
@@ -195,26 +202,6 @@ public double next() {
}
}
- static class Proc implements LongValues.ValueInDocProc {
-
- private ValueInDocProc proc;
-
- private Proc reset(ValueInDocProc proc) {
- this.proc = proc;
- return this;
- }
-
- @Override
- public void onValue(int docId, long value) {
- this.proc.onValue(docId, (double) value);
- }
-
- @Override
- public void onMissing(int docId) {
- this.proc.onMissing(docId);
- }
- }
-
}
public static class FilteredDoubleValues implements DoubleValues {
diff --git a/src/main/java/org/elasticsearch/index/fielddata/LongValues.java b/src/main/java/org/elasticsearch/index/fielddata/LongValues.java
index 1dec9ad5cea6b..c32ad15ce47b2 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/LongValues.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/LongValues.java
@@ -20,46 +20,116 @@
package org.elasticsearch.index.fielddata;
import org.elasticsearch.ElasticSearchIllegalStateException;
-import org.elasticsearch.index.fielddata.util.LongArrayRef;
+import org.elasticsearch.index.fielddata.ordinals.Ordinals;
+import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
/**
*/
-public interface LongValues {
+public abstract class LongValues {
- static final LongValues EMPTY = new Empty();
+ public static final LongValues EMPTY = new Empty();
+ private final boolean multiValued;
+ protected final Iter.Single iter = new Iter.Single();
+
+
+ protected LongValues(boolean multiValued) {
+ this.multiValued = multiValued;
+ }
/**
* Is one of the documents in this field data values is multi valued?
*/
- boolean isMultiValued();
+ public final boolean isMultiValued() {
+ return multiValued;
+ }
/**
* Is there a value for this doc?
*/
- boolean hasValue(int docId);
+ public abstract boolean hasValue(int docId);
- long getValue(int docId);
+ public abstract long getValue(int docId);
- long getValueMissing(int docId, long missingValue);
+ public long getValueMissing(int docId, long missingValue) {
+ if (hasValue(docId)) {
+ return getValue(docId);
+ }
+ return missingValue;
+ }
+
+ public Iter getIter(int docId) {
+ assert !isMultiValued();
+ if (hasValue(docId)) {
+ return iter.reset(getValue(docId));
+ } else {
+ return Iter.Empty.INSTANCE;
+ }
+ }
- Iter getIter(int docId);
+
+ public static abstract class DenseLongValues extends LongValues {
+
+
+ protected DenseLongValues(boolean multiValued) {
+ super(multiValued);
+ }
+
+ @Override
+ public final boolean hasValue(int docId) {
+ return true;
+ }
- void forEachValueInDoc(int docId, ValueInDocProc proc);
+ public final long getValueMissing(int docId, long missingValue) {
+ assert hasValue(docId);
+ assert !isMultiValued();
+ return getValue(docId);
+ }
+
+ public final Iter getIter(int docId) {
+ assert hasValue(docId);
+ assert !isMultiValued();
+ return iter.reset(getValue(docId));
+ }
+
+ }
+
+ public static abstract class OrdBasedLongValues extends LongValues {
+
+ protected final Docs ordinals;
+ private final Iter.Multi iter;
+
+ protected OrdBasedLongValues(Ordinals.Docs ordinals) {
+ super(ordinals.isMultiValued());
+ this.ordinals = ordinals;
+ iter = new Iter.Multi(this);
+ }
+
+ @Override
+ public final boolean hasValue(int docId) {
+ return ordinals.getOrd(docId) != 0;
+ }
- static interface ValueInDocProc {
+ @Override
+ public final long getValue(int docId) {
+ return getByOrd(ordinals.getOrd(docId));
+ }
+
+ protected abstract long getByOrd(int ord);
- void onValue(int docId, long value);
+ @Override
+ public final Iter getIter(int docId) {
+ return iter.reset(ordinals.getIter(docId));
+ }
- void onMissing(int docId);
}
- static interface Iter {
+ public static interface Iter {
boolean hasNext();
long next();
- static class Empty implements Iter {
+ public static class Empty implements Iter {
public static final Empty INSTANCE = new Empty();
@@ -74,7 +144,7 @@ public long next() {
}
}
- static class Single implements Iter {
+ static class Single implements Iter {
public long value;
public boolean done;
@@ -97,12 +167,41 @@ public long next() {
return value;
}
}
+
+ static class Multi implements Iter {
+
+ private org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs.Iter ordsIter;
+ private int ord;
+ private OrdBasedLongValues values;
+
+ public Multi(OrdBasedLongValues values) {
+ this.values = values;
+ }
+
+ public Multi reset(Ordinals.Docs.Iter ordsIter) {
+ this.ordsIter = ordsIter;
+ this.ord = ordsIter.next();
+ return this;
+ }
+
+ @Override
+ public boolean hasNext() {
+ return ord != 0;
+ }
+
+ @Override
+ public long next() {
+ long value = values.getByOrd(ord);
+ ord = ordsIter.next();
+ return value;
+ }
+ }
}
- static class Empty implements LongValues {
- @Override
- public boolean isMultiValued() {
- return false;
+ static class Empty extends LongValues {
+
+ public Empty() {
+ super(false);
}
@Override
@@ -115,34 +214,22 @@ public long getValue(int docId) {
throw new ElasticSearchIllegalStateException("Can't retrieve a value from an empty LongValues");
}
- @Override
- public long getValueMissing(int docId, long missingValue) {
- return missingValue;
- }
-
@Override
public Iter getIter(int docId) {
return Iter.Empty.INSTANCE;
}
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- proc.onMissing(docId);
- }
}
- public static class FilteredLongValues implements LongValues {
+ public static class FilteredLongValues extends LongValues {
protected final LongValues delegate;
public FilteredLongValues(LongValues delegate) {
+ super(delegate.isMultiValued());
this.delegate = delegate;
}
- public boolean isMultiValued() {
- return delegate.isMultiValued();
- }
-
public boolean hasValue(int docId) {
return delegate.hasValue(docId);
}
@@ -151,17 +238,9 @@ public long getValue(int docId) {
return delegate.getValue(docId);
}
- public long getValueMissing(int docId, long missingValue) {
- return delegate.getValueMissing(docId, missingValue);
- }
-
public Iter getIter(int docId) {
return delegate.getIter(docId);
}
-
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- delegate.forEachValueInDoc(docId, proc);
- }
}
}
diff --git a/src/main/java/org/elasticsearch/index/fielddata/StringValues.java b/src/main/java/org/elasticsearch/index/fielddata/StringValues.java
index 9cf2ca6addb52..77860e1cdd613 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/StringValues.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/StringValues.java
@@ -222,7 +222,6 @@ public static class LongBased implements StringValues {
private final StringArrayRef arrayScratch = new StringArrayRef(new String[1], 1);
private final ValuesIter valuesIter = new ValuesIter();
- private final Proc proc = new Proc();
public LongBased(LongValues values) {
this.values = values;
@@ -254,7 +253,14 @@ public Iter getIter(int docId) {
@Override
public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- values.forEachValueInDoc(docId, this.proc.reset(proc));
+ if (values.hasValue(docId)) {
+ final LongValues.Iter longIter = values.getIter(docId);
+ while(longIter.hasNext()) {
+ proc.onValue(docId, Long.toString(longIter.next()));
+ }
+ } else {
+ proc.onMissing(docId);
+ }
}
static class ValuesIter implements Iter {
@@ -277,25 +283,7 @@ public String next() {
}
}
- static class Proc implements LongValues.ValueInDocProc {
-
- private ValueInDocProc proc;
-
- private Proc reset(ValueInDocProc proc) {
- this.proc = proc;
- return this;
- }
-
- @Override
- public void onValue(int docId, long value) {
- proc.onValue(docId, Long.toString(value));
- }
-
- @Override
- public void onMissing(int docId) {
- proc.onMissing(docId);
- }
- }
+
}
public interface WithOrdinals extends StringValues {
diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/ByteArrayAtomicFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/ByteArrayAtomicFieldData.java
index 565d0b89ba3eb..d84a072eef05f 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/plain/ByteArrayAtomicFieldData.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/plain/ByteArrayAtomicFieldData.java
@@ -140,90 +140,20 @@ public DoubleValues getDoubleValues() {
return new DoubleValues(values, ordinals.ordinals());
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.OrdBasedLongValues {
private final byte[] values;
- private final Ordinals.Docs ordinals;
-
- private final ValuesIter iter;
LongValues(byte[] values, Ordinals.Docs ordinals) {
+ super(ordinals);
this.values = values;
- this.ordinals = ordinals;
- this.iter = new ValuesIter(values);
- }
-
- @Override
- public boolean isMultiValued() {
- return ordinals.isMultiValued();
- }
-
- @Override
- public boolean hasValue(int docId) {
- return ordinals.getOrd(docId) != 0;
}
@Override
- public long getValue(int docId) {
- return (long) values[ordinals.getOrd(docId)];
+ protected long getByOrd(int ord) {
+ return (long) values[ord];
}
- @Override
- public long getValueMissing(int docId, long missingValue) {
- int ord = ordinals.getOrd(docId);
- if (ord == 0) {
- return missingValue;
- } else {
- return (long) values[ord];
- }
- }
-
- @Override
- public Iter getIter(int docId) {
- return iter.reset(ordinals.getIter(docId));
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- Ordinals.Docs.Iter iter = ordinals.getIter(docId);
- int ord = iter.next();
- if (ord == 0) {
- proc.onMissing(docId);
- return;
- }
- do {
- proc.onValue(docId, (long) values[ord]);
- } while ((ord = iter.next()) != 0);
- }
-
- static class ValuesIter implements Iter {
-
- private final byte[] values;
- private Ordinals.Docs.Iter ordsIter;
- private int ord;
-
- ValuesIter(byte[] values) {
- this.values = values;
- }
-
- public ValuesIter reset(Ordinals.Docs.Iter ordsIter) {
- this.ordsIter = ordsIter;
- this.ord = ordsIter.next();
- return this;
- }
-
- @Override
- public boolean hasNext() {
- return ord != 0;
- }
-
- @Override
- public long next() {
- byte value = values[ord];
- ord = ordsIter.next();
- return (long) value;
- }
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
@@ -370,23 +300,17 @@ public DoubleValues getDoubleValues() {
return new DoubleValues(values, set);
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues {
private final byte[] values;
private final FixedBitSet set;
- private final Iter.Single iter = new Iter.Single();
-
LongValues(byte[] values, FixedBitSet set) {
+ super(false);
this.values = values;
this.set = set;
}
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
@Override
public boolean hasValue(int docId) {
return set.get(docId);
@@ -397,32 +321,6 @@ public long getValue(int docId) {
return (long) values[docId];
}
- @Override
- public long getValueMissing(int docId, long missingValue) {
- if (set.get(docId)) {
- return (long) values[docId];
- } else {
- return missingValue;
- }
- }
-
- @Override
- public Iter getIter(int docId) {
- if (set.get(docId)) {
- return iter.reset((long) values[docId]);
- } else {
- return Iter.Empty.INSTANCE;
- }
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- if (set.get(docId)) {
- proc.onValue(docId, (long) values[docId]);
- } else {
- proc.onMissing(docId);
- }
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
@@ -538,44 +436,21 @@ public DoubleValues getDoubleValues() {
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.DenseLongValues {
private final byte[] values;
- private final Iter.Single iter = new Iter.Single();
LongValues(byte[] values) {
+ super(false);
this.values = values;
}
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
- @Override
- public boolean hasValue(int docId) {
- return true;
- }
@Override
public long getValue(int docId) {
return (long) values[docId];
}
- @Override
- public long getValueMissing(int docId, long missingValue) {
- return (long) values[docId];
- }
-
- @Override
- public Iter getIter(int docId) {
- return iter.reset((long) values[docId]);
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- proc.onValue(docId, (long) values[docId]);
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayAtomicFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayAtomicFieldData.java
index 6e31d5abf9030..11375dbd8c223 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayAtomicFieldData.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayAtomicFieldData.java
@@ -232,88 +232,18 @@ public String next() {
}
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.OrdBasedLongValues {
private final double[] values;
- private final Ordinals.Docs ordinals;
- private final ValuesIter iter;
LongValues(double[] values, Ordinals.Docs ordinals) {
+ super(ordinals);
this.values = values;
- this.ordinals = ordinals;
- this.iter = new ValuesIter(values);
- }
-
- @Override
- public boolean isMultiValued() {
- return ordinals.isMultiValued();
- }
-
- @Override
- public boolean hasValue(int docId) {
- return ordinals.getOrd(docId) != 0;
- }
-
- @Override
- public long getValue(int docId) {
- return (long) values[ordinals.getOrd(docId)];
}
@Override
- public long getValueMissing(int docId, long missingValue) {
- int ord = ordinals.getOrd(docId);
- if (ord == 0) {
- return missingValue;
- } else {
- return (long) values[ord];
- }
- }
-
- @Override
- public Iter getIter(int docId) {
- return iter.reset(ordinals.getIter(docId));
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- Ordinals.Docs.Iter iter = ordinals.getIter(docId);
- int ord = iter.next();
- if (ord == 0) {
- proc.onMissing(docId);
- return;
- }
- do {
- proc.onValue(docId, (long) values[ord]);
- } while ((ord = iter.next()) != 0);
- }
-
- static class ValuesIter implements LongValues.Iter {
-
- private final double[] values;
- private Ordinals.Docs.Iter ordsIter;
- private int ord;
-
- ValuesIter(double[] values) {
- this.values = values;
- }
-
- public ValuesIter reset(Ordinals.Docs.Iter ordsIter) {
- this.ordsIter = ordsIter;
- this.ord = ordsIter.next();
- return this;
- }
-
- @Override
- public boolean hasNext() {
- return ord != 0;
- }
-
- @Override
- public long next() {
- double value = values[ord];
- ord = ordsIter.next();
- return (long) value;
- }
+ protected final long getByOrd(int ord) {
+ return (long)values[ord];
}
}
@@ -459,23 +389,17 @@ public DoubleValues getDoubleValues() {
return new DoubleValues(values, set);
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues {
private final double[] values;
private final FixedBitSet set;
- private final Iter.Single iter = new Iter.Single();
-
LongValues(double[] values, FixedBitSet set) {
+ super(false);
this.values = values;
this.set = set;
}
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
@Override
public boolean hasValue(int docId) {
return set.get(docId);
@@ -485,31 +409,6 @@ public boolean hasValue(int docId) {
public long getValue(int docId) {
return (long) values[docId];
}
-
- @Override
- public long getValueMissing(int docId, long missingValue) {
- if (set.get(docId)) {
- return (long) values[docId];
- } else {
- return missingValue;
- }
- }
-
- @Override
- public Iter getIter(int docId) {
- if (set.get(docId)) {
- return iter.reset((long) values[docId]);
- } else {
- return Iter.Empty.INSTANCE;
- }
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- if (set.get(docId)) {
- proc.onValue(docId, (long) values[docId]);
- }
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
@@ -656,45 +555,20 @@ public void forEachValueInDoc(int docId, ValueInDocProc proc) {
}
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.DenseLongValues {
private final double[] values;
- private final Iter.Single iter = new Iter.Single();
-
LongValues(double[] values) {
+ super(false);
this.values = values;
}
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
- @Override
- public boolean hasValue(int docId) {
- return true;
- }
-
@Override
public long getValue(int docId) {
return (long) values[docId];
}
- @Override
- public long getValueMissing(int docId, long missingValue) {
- return (long) values[docId];
- }
-
- @Override
- public Iter getIter(int docId) {
- return iter.reset((long) values[docId]);
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- proc.onValue(docId, (long) values[docId]);
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayAtomicFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayAtomicFieldData.java
index 168d1f282b079..af84e156c1460 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayAtomicFieldData.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayAtomicFieldData.java
@@ -146,88 +146,18 @@ public DoubleValues getDoubleValues() {
return new DoubleValues(values, ordinals.ordinals());
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.OrdBasedLongValues {
private final float[] values;
- private final Ordinals.Docs ordinals;
- private final ValuesIter iter;
LongValues(float[] values, Ordinals.Docs ordinals) {
+ super(ordinals);
this.values = values;
- this.ordinals = ordinals;
- this.iter = new ValuesIter(values);
- }
-
- @Override
- public boolean isMultiValued() {
- return ordinals.isMultiValued();
- }
-
- @Override
- public boolean hasValue(int docId) {
- return ordinals.getOrd(docId) != 0;
- }
-
- @Override
- public long getValue(int docId) {
- return (long) values[ordinals.getOrd(docId)];
}
@Override
- public long getValueMissing(int docId, long missingValue) {
- int ord = ordinals.getOrd(docId);
- if (ord == 0) {
- return missingValue;
- } else {
- return (long) values[ord];
- }
- }
-
- @Override
- public Iter getIter(int docId) {
- return iter.reset(ordinals.getIter(docId));
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- Ordinals.Docs.Iter iter = ordinals.getIter(docId);
- int ord = iter.next();
- if (ord == 0) {
- proc.onMissing(docId);
- return;
- }
- do {
- proc.onValue(docId, (long) values[ord]);
- } while ((ord = iter.next()) != 0);
- }
-
- static class ValuesIter implements Iter {
-
- private final float[] values;
- private Ordinals.Docs.Iter ordsIter;
- private int ord;
-
- ValuesIter(float[] values) {
- this.values = values;
- }
-
- public ValuesIter reset(Ordinals.Docs.Iter ordsIter) {
- this.ordsIter = ordsIter;
- this.ord = ordsIter.next();
- return this;
- }
-
- @Override
- public boolean hasNext() {
- return ord != 0;
- }
-
- @Override
- public long next() {
- float value = values[ord];
- ord = ordsIter.next();
- return (long) value;
- }
+ public long getByOrd(int ord) {
+ return (long) values[ord];
}
}
@@ -374,22 +304,17 @@ public DoubleValues getDoubleValues() {
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues {
private final float[] values;
private final FixedBitSet set;
- private final Iter.Single iter = new Iter.Single();
LongValues(float[] values, FixedBitSet set) {
+ super(false);
this.values = values;
this.set = set;
}
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
@Override
public boolean hasValue(int docId) {
return set.get(docId);
@@ -399,33 +324,6 @@ public boolean hasValue(int docId) {
public long getValue(int docId) {
return (long) values[docId];
}
-
- @Override
- public long getValueMissing(int docId, long missingValue) {
- if (set.get(docId)) {
- return (long) values[docId];
- } else {
- return missingValue;
- }
- }
-
- @Override
- public Iter getIter(int docId) {
- if (set.get(docId)) {
- return iter.reset((long) values[docId]);
- } else {
- return Iter.Empty.INSTANCE;
- }
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- if (set.get(docId)) {
- proc.onValue(docId, (long) values[docId]);
- } else {
- proc.onMissing(docId);
- }
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
@@ -541,44 +439,20 @@ public DoubleValues getDoubleValues() {
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.DenseLongValues {
private final float[] values;
- private final Iter.Single iter = new Iter.Single();
LongValues(float[] values) {
+ super(false);
this.values = values;
}
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
- @Override
- public boolean hasValue(int docId) {
- return true;
- }
-
@Override
public long getValue(int docId) {
return (long) values[docId];
}
- @Override
- public long getValueMissing(int docId, long missingValue) {
- return (long) values[docId];
- }
-
- @Override
- public Iter getIter(int docId) {
- return iter.reset((long) values[docId]);
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- proc.onValue(docId, (long) values[docId]);
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/IntArrayAtomicFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/IntArrayAtomicFieldData.java
index 7b7e8ab706a8c..269a96dc308d1 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/plain/IntArrayAtomicFieldData.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/plain/IntArrayAtomicFieldData.java
@@ -148,89 +148,20 @@ public DoubleValues getDoubleValues() {
return new DoubleValues(values, ordinals.ordinals());
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.OrdBasedLongValues {
private final int[] values;
- private final Ordinals.Docs ordinals;
- private final ValuesIter iter;
LongValues(int[] values, Ordinals.Docs ordinals) {
+ super(ordinals);
this.values = values;
- this.ordinals = ordinals;
- this.iter = new ValuesIter(values);
- }
-
- @Override
- public boolean isMultiValued() {
- return ordinals.isMultiValued();
- }
-
- @Override
- public boolean hasValue(int docId) {
- return ordinals.getOrd(docId) != 0;
- }
-
- @Override
- public long getValue(int docId) {
- return (long) values[ordinals.getOrd(docId)];
}
@Override
- public long getValueMissing(int docId, long missingValue) {
- int ord = ordinals.getOrd(docId);
- if (ord == 0) {
- return missingValue;
- } else {
- return (long) values[ord];
- }
+ public long getByOrd(int ord) {
+ return (long) values[ord];
}
- @Override
- public Iter getIter(int docId) {
- return iter.reset(ordinals.getIter(docId));
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- Ordinals.Docs.Iter iter = ordinals.getIter(docId);
- int ord = iter.next();
- if (ord == 0) {
- proc.onMissing(docId);
- return;
- }
- do {
- proc.onValue(docId, (long) values[ord]);
- } while ((ord = iter.next()) != 0);
- }
-
- static class ValuesIter implements Iter {
-
- private final int[] values;
- private Ordinals.Docs.Iter ordsIter;
- private int ord;
-
- ValuesIter(int[] values) {
- this.values = values;
- }
-
- public ValuesIter reset(Ordinals.Docs.Iter ordsIter) {
- this.ordsIter = ordsIter;
- this.ord = ordsIter.next();
- return this;
- }
-
- @Override
- public boolean hasNext() {
- return ord != 0;
- }
-
- @Override
- public long next() {
- int value = values[ord];
- ord = ordsIter.next();
- return (long) value;
- }
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
@@ -375,22 +306,17 @@ public DoubleValues getDoubleValues() {
return new DoubleValues(values, set);
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues {
private final int[] values;
private final FixedBitSet set;
- private final Iter.Single iter = new Iter.Single();
LongValues(int[] values, FixedBitSet set) {
+ super(false);
this.values = values;
this.set = set;
}
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
@Override
public boolean hasValue(int docId) {
return set.get(docId);
@@ -401,32 +327,6 @@ public long getValue(int docId) {
return (long) values[docId];
}
- @Override
- public long getValueMissing(int docId, long missingValue) {
- if (set.get(docId)) {
- return (long) values[docId];
- } else {
- return missingValue;
- }
- }
-
- @Override
- public Iter getIter(int docId) {
- if (set.get(docId)) {
- return iter.reset((long) values[docId]);
- } else {
- return Iter.Empty.INSTANCE;
- }
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- if (set.get(docId)) {
- proc.onValue(docId, (long) values[docId]);
- } else {
- proc.onMissing(docId);
- }
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
@@ -540,45 +440,21 @@ public DoubleValues getDoubleValues() {
return new DoubleValues(values);
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.DenseLongValues {
private final int[] values;
- private final Iter.Single iter = new Iter.Single();
LongValues(int[] values) {
+ super(false);
assert values.length != 0;
this.values = values;
}
-
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
- @Override
- public boolean hasValue(int docId) {
- return true;
- }
-
+
@Override
public long getValue(int docId) {
return (long) values[docId];
}
- @Override
- public long getValueMissing(int docId, long missingValue) {
- return (long) values[docId];
- }
-
- @Override
- public Iter getIter(int docId) {
- return iter.reset((long) values[docId]);
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- proc.onValue(docId, (long) values[docId]);
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/LongArrayAtomicFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/LongArrayAtomicFieldData.java
index 2a4237ec4481a..0b392ed2d7a94 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/plain/LongArrayAtomicFieldData.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/plain/LongArrayAtomicFieldData.java
@@ -226,88 +226,18 @@ public String next() {
}
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.OrdBasedLongValues {
private final long[] values;
- private final Ordinals.Docs ordinals;
- private final ValuesIter iter;
LongValues(long[] values, Ordinals.Docs ordinals) {
+ super(ordinals);
this.values = values;
- this.ordinals = ordinals;
- this.iter = new ValuesIter(values);
- }
-
- @Override
- public boolean isMultiValued() {
- return ordinals.isMultiValued();
- }
-
- @Override
- public boolean hasValue(int docId) {
- return ordinals.getOrd(docId) != 0;
- }
-
- @Override
- public long getValue(int docId) {
- return values[ordinals.getOrd(docId)];
}
-
+
@Override
- public long getValueMissing(int docId, long missingValue) {
- int ord = ordinals.getOrd(docId);
- if (ord == 0) {
- return missingValue;
- } else {
- return values[ord];
- }
- }
-
- @Override
- public Iter getIter(int docId) {
- return iter.reset(ordinals.getIter(docId));
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- Ordinals.Docs.Iter iter = ordinals.getIter(docId);
- int ord = iter.next();
- if (ord == 0) {
- proc.onMissing(docId);
- return;
- }
- do {
- proc.onValue(docId, values[ord]);
- } while ((ord = iter.next()) != 0);
- }
-
- static class ValuesIter implements Iter {
-
- private final long[] values;
- private Ordinals.Docs.Iter ordsIter;
- private int ord;
-
- ValuesIter(long[] values) {
- this.values = values;
- }
-
- public ValuesIter reset(Ordinals.Docs.Iter ordsIter) {
- this.ordsIter = ordsIter;
- this.ord = ordsIter.next();
- return this;
- }
-
- @Override
- public boolean hasNext() {
- return ord != 0;
- }
-
- @Override
- public long next() {
- long value = values[ord];
- ord = ordsIter.next();
- return value;
- }
+ public long getByOrd(int ord) {
+ return values[ord];
}
}
@@ -502,22 +432,17 @@ public void forEachValueInDoc(int docId, ValueInDocProc proc) {
}
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues {
private final long[] values;
private final FixedBitSet set;
- private final Iter.Single iter = new Iter.Single();
LongValues(long[] values, FixedBitSet set) {
+ super(false);
this.values = values;
this.set = set;
}
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
@Override
public boolean hasValue(int docId) {
return set.get(docId);
@@ -527,33 +452,6 @@ public boolean hasValue(int docId) {
public long getValue(int docId) {
return values[docId];
}
-
- @Override
- public long getValueMissing(int docId, long missingValue) {
- if (set.get(docId)) {
- return values[docId];
- } else {
- return missingValue;
- }
- }
-
- @Override
- public Iter getIter(int docId) {
- if (set.get(docId)) {
- return iter.reset(values[docId]);
- } else {
- return Iter.Empty.INSTANCE;
- }
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- if (set.get(docId)) {
- proc.onValue(docId, values[docId]);
- } else {
- proc.onMissing(docId);
- }
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
@@ -702,44 +600,21 @@ public void forEachValueInDoc(int docId, ValueInDocProc proc) {
}
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.DenseLongValues {
private final long[] values;
private final Iter.Single iter = new Iter.Single();
LongValues(long[] values) {
+ super(false);
this.values = values;
}
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
- @Override
- public boolean hasValue(int docId) {
- return true;
- }
-
@Override
public long getValue(int docId) {
return values[docId];
}
- @Override
- public long getValueMissing(int docId, long missingValue) {
- return values[docId];
- }
-
- @Override
- public Iter getIter(int docId) {
- return iter.reset(values[docId]);
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- proc.onValue(docId, values[docId]);
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/ShortArrayAtomicFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/ShortArrayAtomicFieldData.java
index ff3ccdfd0477b..68724c71d2442 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/plain/ShortArrayAtomicFieldData.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/plain/ShortArrayAtomicFieldData.java
@@ -21,11 +21,13 @@
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.RamUsage;
-import org.elasticsearch.index.fielddata.*;
+import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
+import org.elasticsearch.index.fielddata.BytesValues;
+import org.elasticsearch.index.fielddata.DoubleValues;
+import org.elasticsearch.index.fielddata.LongValues;
+import org.elasticsearch.index.fielddata.ScriptDocValues;
+import org.elasticsearch.index.fielddata.StringValues;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
-import org.elasticsearch.index.fielddata.util.DoubleArrayRef;
-import org.elasticsearch.index.fielddata.util.IntArrayRef;
-import org.elasticsearch.index.fielddata.util.LongArrayRef;
/**
*/
@@ -146,89 +148,20 @@ public DoubleValues getDoubleValues() {
return new DoubleValues(values, ordinals.ordinals());
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.OrdBasedLongValues {
private final short[] values;
- private final Ordinals.Docs ordinals;
- private final ValuesIter iter;
LongValues(short[] values, Ordinals.Docs ordinals) {
+ super(ordinals);
this.values = values;
- this.ordinals = ordinals;
- this.iter = new ValuesIter(values);
- }
-
- @Override
- public boolean isMultiValued() {
- return ordinals.isMultiValued();
- }
-
- @Override
- public boolean hasValue(int docId) {
- return ordinals.getOrd(docId) != 0;
- }
-
- @Override
- public long getValue(int docId) {
- return (long) values[ordinals.getOrd(docId)];
}
@Override
- public long getValueMissing(int docId, long missingValue) {
- int ord = ordinals.getOrd(docId);
- if (ord == 0) {
- return missingValue;
- } else {
- return (long) values[ord];
- }
+ public long getByOrd(int ord) {
+ return (long) values[ord];
}
- @Override
- public Iter getIter(int docId) {
- return iter.reset(ordinals.getIter(docId));
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- Ordinals.Docs.Iter iter = ordinals.getIter(docId);
- int ord = iter.next();
- if (ord == 0) {
- proc.onMissing(docId);
- return;
- }
- do {
- proc.onValue(docId, (long) values[ord]);
- } while ((ord = iter.next()) != 0);
- }
-
- static class ValuesIter implements Iter {
-
- private final short[] values;
- private Ordinals.Docs.Iter ordsIter;
- private int ord;
-
- ValuesIter(short[] values) {
- this.values = values;
- }
-
- public ValuesIter reset(Ordinals.Docs.Iter ordsIter) {
- this.ordsIter = ordsIter;
- this.ord = ordsIter.next();
- return this;
- }
-
- @Override
- public boolean hasNext() {
- return ord != 0;
- }
-
- @Override
- public long next() {
- short value = values[ord];
- ord = ordsIter.next();
- return (long) value;
- }
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
@@ -373,22 +306,17 @@ public DoubleValues getDoubleValues() {
return new DoubleValues(values, set);
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues {
private final short[] values;
private final FixedBitSet set;
- private final Iter.Single iter = new Iter.Single();
LongValues(short[] values, FixedBitSet set) {
+ super(false);
this.values = values;
this.set = set;
}
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
@Override
public boolean hasValue(int docId) {
return set.get(docId);
@@ -398,33 +326,6 @@ public boolean hasValue(int docId) {
public long getValue(int docId) {
return (long) values[docId];
}
-
- @Override
- public long getValueMissing(int docId, long missingValue) {
- if (set.get(docId)) {
- return (long) values[docId];
- } else {
- return missingValue;
- }
- }
-
- @Override
- public Iter getIter(int docId) {
- if (set.get(docId)) {
- return iter.reset((long) values[docId]);
- } else {
- return Iter.Empty.INSTANCE;
- }
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- if (set.get(docId)) {
- proc.onValue(docId, (long) values[docId]);
- } else {
- proc.onMissing(docId);
- }
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
@@ -541,44 +442,20 @@ public DoubleValues getDoubleValues() {
return new DoubleValues(values);
}
- static class LongValues implements org.elasticsearch.index.fielddata.LongValues {
+ static class LongValues extends org.elasticsearch.index.fielddata.LongValues.DenseLongValues {
private final short[] values;
- private final Iter.Single iter = new Iter.Single();
LongValues(short[] values) {
+ super(false);
this.values = values;
}
- @Override
- public boolean isMultiValued() {
- return false;
- }
-
- @Override
- public boolean hasValue(int docId) {
- return true;
- }
-
@Override
public long getValue(int docId) {
return (long) values[docId];
}
- @Override
- public long getValueMissing(int docId, long missingValue) {
- return (long) values[docId];
- }
-
- @Override
- public Iter getIter(int docId) {
- return iter.reset((long) values[docId]);
- }
-
- @Override
- public void forEachValueInDoc(int docId, ValueInDocProc proc) {
- proc.onValue(docId, (long) values[docId]);
- }
}
static class DoubleValues implements org.elasticsearch.index.fielddata.DoubleValues {
diff --git a/src/main/java/org/elasticsearch/search/facet/LongFacetAggregatorBase.java b/src/main/java/org/elasticsearch/search/facet/LongFacetAggregatorBase.java
new file mode 100644
index 0000000000000..f356c6e2d37c0
--- /dev/null
+++ b/src/main/java/org/elasticsearch/search/facet/LongFacetAggregatorBase.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.facet;
+
+import org.elasticsearch.index.fielddata.LongValues;
+import org.elasticsearch.index.fielddata.LongValues.Iter;
+
+/**
+ * Simple Facet aggregator base class for {@link LongValues}
+ */
+public abstract class LongFacetAggregatorBase {
+ private int total;
+ private int missing;
+
+ public void onDoc(int docId, LongValues values) {
+ if (values.hasValue(docId)) {
+ final Iter iter = values.getIter(docId);
+ while(iter.hasNext()) {
+ onValue(docId, iter.next());
+ total++;
+ }
+ } else {
+ missing++;
+ }
+ }
+
+ protected abstract void onValue(int docId, long next);
+
+ public final int total() {
+ return total;
+ }
+
+ public final int missing() {
+ return missing;
+ }
+}
diff --git a/src/main/java/org/elasticsearch/search/facet/datehistogram/CountDateHistogramFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/datehistogram/CountDateHistogramFacetExecutor.java
index 497b03bdee516..b1ef4169b48ef 100644
--- a/src/main/java/org/elasticsearch/search/facet/datehistogram/CountDateHistogramFacetExecutor.java
+++ b/src/main/java/org/elasticsearch/search/facet/datehistogram/CountDateHistogramFacetExecutor.java
@@ -27,6 +27,7 @@
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
+import org.elasticsearch.search.facet.LongFacetAggregatorBase;
import java.io.IOException;
@@ -76,7 +77,7 @@ public void setNextReader(AtomicReaderContext context) throws IOException {
@Override
public void collect(int doc) throws IOException {
- values.forEachValueInDoc(doc, histoProc);
+ histoProc.onDoc(doc, values);
}
@Override
@@ -84,7 +85,7 @@ public void postCollection() {
}
}
- public static class DateHistogramProc implements LongValues.ValueInDocProc {
+ public static class DateHistogramProc extends LongFacetAggregatorBase {
private final TLongLongHashMap counts;
private final TimeZoneRounding tzRounding;
@@ -94,10 +95,6 @@ public DateHistogramProc(TLongLongHashMap counts, TimeZoneRounding tzRounding) {
this.tzRounding = tzRounding;
}
- @Override
- public void onMissing(int docId) {
- }
-
@Override
public void onValue(int docId, long value) {
counts.adjustOrPutValue(tzRounding.calc(value), 1, 1);
diff --git a/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueDateHistogramFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueDateHistogramFacetExecutor.java
index 19c18cd3c5d86..e723f6239ee3b 100644
--- a/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueDateHistogramFacetExecutor.java
+++ b/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueDateHistogramFacetExecutor.java
@@ -28,6 +28,7 @@
import org.elasticsearch.index.fielddata.LongValues;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
+import org.elasticsearch.search.facet.LongFacetAggregatorBase;
import java.io.IOException;
@@ -79,7 +80,7 @@ public void setNextReader(AtomicReaderContext context) throws IOException {
@Override
public void collect(int doc) throws IOException {
- keyValues.forEachValueInDoc(doc, histoProc);
+ histoProc.onDoc(doc, keyValues);
}
@Override
@@ -87,7 +88,7 @@ public void postCollection() {
}
}
- public static class DateHistogramProc implements LongValues.ValueInDocProc {
+ public static class DateHistogramProc extends LongFacetAggregatorBase {
final ExtTLongObjectHashMap<InternalFullDateHistogramFacet.FullEntry> entries;
private final TimeZoneRounding tzRounding;
@@ -101,10 +102,6 @@ public DateHistogramProc(TimeZoneRounding tzRounding, ExtTLongObjectHashMap<Inte
this.entries = entries;
}
- @Override
- public void onMissing(int docId) {
- }
-
@Override
public void onValue(int docId, long value) {
long time = tzRounding.calc(value);
diff --git a/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueScriptDateHistogramFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueScriptDateHistogramFacetExecutor.java
index 0a5528b619889..33977a897e759 100644
--- a/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueScriptDateHistogramFacetExecutor.java
+++ b/src/main/java/org/elasticsearch/search/facet/datehistogram/ValueScriptDateHistogramFacetExecutor.java
@@ -29,6 +29,7 @@
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
+import org.elasticsearch.search.facet.LongFacetAggregatorBase;
import java.io.IOException;
@@ -86,7 +87,7 @@ public void setNextReader(AtomicReaderContext context) throws IOException {
@Override
public void collect(int doc) throws IOException {
- keyValues.forEachValueInDoc(doc, histoProc);
+ histoProc.onDoc(doc, keyValues);
}
@Override
@@ -94,7 +95,7 @@ public void postCollection() {
}
}
- public static class DateHistogramProc implements LongValues.ValueInDocProc {
+ public static class DateHistogramProc extends LongFacetAggregatorBase {
private final TimeZoneRounding tzRounding;
protected final SearchScript valueScript;
@@ -107,10 +108,6 @@ public DateHistogramProc(TimeZoneRounding tzRounding, SearchScript valueScript,
this.entries = entries;
}
- @Override
- public void onMissing(int docId) {
- }
-
@Override
public void onValue(int docId, long value) {
valueScript.setNextDocId(docId);
diff --git a/src/main/java/org/elasticsearch/search/facet/terms/longs/TermsLongFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/terms/longs/TermsLongFacetExecutor.java
index 9497638747dfa..0f0ad77556cc4 100644
--- a/src/main/java/org/elasticsearch/search/facet/terms/longs/TermsLongFacetExecutor.java
+++ b/src/main/java/org/elasticsearch/search/facet/terms/longs/TermsLongFacetExecutor.java
@@ -34,6 +34,7 @@
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
+import org.elasticsearch.search.facet.LongFacetAggregatorBase;
import org.elasticsearch.search.facet.terms.TermsFacet;
import org.elasticsearch.search.facet.terms.support.EntryPriorityQueue;
import org.elasticsearch.search.internal.SearchContext;
@@ -50,7 +51,6 @@ public class TermsLongFacetExecutor extends FacetExecutor {
private final IndexNumericFieldData indexFieldData;
private final TermsFacet.ComparatorType comparatorType;
private final int size;
- private final int numberOfShards;
private final SearchScript script;
private final ImmutableSet<BytesRef> excluded;
@@ -63,7 +63,6 @@ public TermsLongFacetExecutor(IndexNumericFieldData indexFieldData, int size, Te
this.indexFieldData = indexFieldData;
this.size = size;
this.comparatorType = comparatorType;
- this.numberOfShards = context.numberOfShards();
this.script = script;
this.excluded = excluded;
@@ -147,7 +146,7 @@ public void setNextReader(AtomicReaderContext context) throws IOException {
@Override
public void collect(int doc) throws IOException {
- values.forEachValueInDoc(doc, aggregator);
+ aggregator.onDoc(doc, values);
}
@Override
@@ -200,13 +199,10 @@ public void onValue(int docId, long value) {
}
}
- public static class StaticAggregatorValueProc implements LongValues.ValueInDocProc {
+ public static class StaticAggregatorValueProc extends LongFacetAggregatorBase {
private final TLongIntHashMap facets;
- private int missing;
- private int total;
-
public StaticAggregatorValueProc(TLongIntHashMap facets) {
this.facets = facets;
}
@@ -214,24 +210,10 @@ public StaticAggregatorValueProc(TLongIntHashMap facets) {
@Override
public void onValue(int docId, long value) {
facets.adjustOrPutValue(value, 1, 1);
- total++;
- }
-
- @Override
- public void onMissing(int docId) {
- missing++;
}
public final TLongIntHashMap facets() {
return facets;
}
-
- public final int missing() {
- return this.missing;
- }
-
- public final int total() {
- return this.total;
- }
}
}
diff --git a/src/main/java/org/elasticsearch/search/facet/termsstats/longs/TermsStatsLongFacetExecutor.java b/src/main/java/org/elasticsearch/search/facet/termsstats/longs/TermsStatsLongFacetExecutor.java
index 014240fffd163..904be4e63e31c 100644
--- a/src/main/java/org/elasticsearch/search/facet/termsstats/longs/TermsStatsLongFacetExecutor.java
+++ b/src/main/java/org/elasticsearch/search/facet/termsstats/longs/TermsStatsLongFacetExecutor.java
@@ -31,6 +31,7 @@
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.facet.FacetExecutor;
import org.elasticsearch.search.facet.InternalFacet;
+import org.elasticsearch.search.facet.LongFacetAggregatorBase;
import org.elasticsearch.search.facet.termsstats.TermsStatsFacet;
import org.elasticsearch.search.internal.SearchContext;
@@ -128,19 +129,18 @@ public void setNextReader(AtomicReaderContext context) throws IOException {
@Override
public void collect(int doc) throws IOException {
- keyValues.forEachValueInDoc(doc, aggregator);
+ aggregator.onDoc(doc, keyValues);
}
@Override
public void postCollection() {
- TermsStatsLongFacetExecutor.this.missing = aggregator.missing;
+ TermsStatsLongFacetExecutor.this.missing = aggregator.missing();
}
}
- public static class Aggregator implements LongValues.ValueInDocProc {
+ public static class Aggregator extends LongFacetAggregatorBase {
final ExtTLongObjectHashMap<InternalTermsStatsLongFacet.LongEntry> entries;
- int missing;
DoubleValues valueValues;
final ValueAggregator valueAggregator = new ValueAggregator();
@@ -160,10 +160,6 @@ public void onValue(int docId, long value) {
valueValues.forEachValueInDoc(docId, valueAggregator);
}
- @Override
- public void onMissing(int docId) {
- missing++;
- }
public static class ValueAggregator implements DoubleValues.ValueInDocProc {
diff --git a/src/test/java/org/elasticsearch/test/unit/index/fielddata/AbstractFieldDataTests.java b/src/test/java/org/elasticsearch/test/unit/index/fielddata/AbstractFieldDataTests.java
index 731c333205f48..dc1797186e200 100644
--- a/src/test/java/org/elasticsearch/test/unit/index/fielddata/AbstractFieldDataTests.java
+++ b/src/test/java/org/elasticsearch/test/unit/index/fielddata/AbstractFieldDataTests.java
@@ -19,26 +19,33 @@
package org.elasticsearch.test.unit.index.fielddata;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.sameInstance;
+
+import java.util.ArrayList;
+import java.util.List;
+
import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.lucene.HashedBytesRef;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.Index;
-import org.elasticsearch.index.fielddata.*;
+import org.elasticsearch.index.fielddata.DoubleValues;
+import org.elasticsearch.index.fielddata.FieldDataType;
+import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.fielddata.StringValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.sameInstance;
-
/**
*/
@Test
@@ -115,42 +122,6 @@ public void onMissing(int docId) {
}
}
- public static class LongValuesVerifierProc implements LongValues.ValueInDocProc {
-
- private static final Long MISSING = new Long(0);
-
- private final int docId;
- private final List<Long> expected = new ArrayList<Long>();
-
- private int idx;
-
- LongValuesVerifierProc(int docId) {
- this.docId = docId;
- }
-
- public LongValuesVerifierProc addExpected(long value) {
- expected.add(value);
- return this;
- }
-
- public LongValuesVerifierProc addMissing() {
- expected.add(MISSING);
- return this;
- }
-
- @Override
- public void onValue(int docId, long value) {
- assertThat(docId, equalTo(this.docId));
- assertThat(value, equalTo(expected.get(idx++)));
- }
-
- @Override
- public void onMissing(int docId) {
- assertThat(docId, equalTo(this.docId));
- assertThat(MISSING, sameInstance(expected.get(idx++)));
- }
- }
-
public static class DoubleValuesVerifierProc implements DoubleValues.ValueInDocProc {
private static final Double MISSING = new Double(0);
diff --git a/src/test/java/org/elasticsearch/test/unit/index/fielddata/NumericFieldDataTests.java b/src/test/java/org/elasticsearch/test/unit/index/fielddata/NumericFieldDataTests.java
index 73c24ff975982..999176424cb95 100644
--- a/src/test/java/org/elasticsearch/test/unit/index/fielddata/NumericFieldDataTests.java
+++ b/src/test/java/org/elasticsearch/test/unit/index/fielddata/NumericFieldDataTests.java
@@ -79,10 +79,6 @@ public void testSingleValueAllSetNumber() throws Exception {
assertThat(longValuesIter.next(), equalTo(3l));
assertThat(longValuesIter.hasNext(), equalTo(false));
- longValues.forEachValueInDoc(0, new LongValuesVerifierProc(0).addExpected(2l));
- longValues.forEachValueInDoc(1, new LongValuesVerifierProc(1).addExpected(1l));
- longValues.forEachValueInDoc(2, new LongValuesVerifierProc(2).addExpected(3l));
-
DoubleValues doubleValues = fieldData.getDoubleValues();
assertThat(doubleValues.isMultiValued(), equalTo(false));
@@ -172,10 +168,6 @@ public void testSingleValueWithMissingNumber() throws Exception {
assertThat(longValuesIter.next(), equalTo(3l));
assertThat(longValuesIter.hasNext(), equalTo(false));
- longValues.forEachValueInDoc(0, new LongValuesVerifierProc(0).addExpected(2l));
- longValues.forEachValueInDoc(1, new LongValuesVerifierProc(1).addMissing());
- longValues.forEachValueInDoc(2, new LongValuesVerifierProc(2).addExpected(3l));
-
DoubleValues doubleValues = fieldData.getDoubleValues();
assertThat(doubleValues.isMultiValued(), equalTo(false));
@@ -295,10 +287,6 @@ public void testMultiValueAllSetNumber() throws Exception {
assertThat(longValuesIter.next(), equalTo(3l));
assertThat(longValuesIter.hasNext(), equalTo(false));
- longValues.forEachValueInDoc(0, new LongValuesVerifierProc(0).addExpected(2l).addExpected(4l));
- longValues.forEachValueInDoc(1, new LongValuesVerifierProc(1).addExpected(1l));
- longValues.forEachValueInDoc(2, new LongValuesVerifierProc(2).addExpected(3l));
-
DoubleValues doubleValues = fieldData.getDoubleValues();
assertThat(doubleValues.isMultiValued(), equalTo(true));
@@ -375,10 +363,6 @@ public void testMultiValueWithMissingNumber() throws Exception {
assertThat(longValuesIter.next(), equalTo(3l));
assertThat(longValuesIter.hasNext(), equalTo(false));
- longValues.forEachValueInDoc(0, new LongValuesVerifierProc(0).addExpected(2l).addExpected(4l));
- longValues.forEachValueInDoc(1, new LongValuesVerifierProc(1).addMissing());
- longValues.forEachValueInDoc(2, new LongValuesVerifierProc(2).addExpected(3l));
-
DoubleValues doubleValues = fieldData.getDoubleValues();
assertThat(doubleValues.isMultiValued(), equalTo(true));
@@ -445,10 +429,6 @@ public void testMissingValueForAll() throws Exception {
longValuesIter = longValues.getIter(2);
assertThat(longValuesIter.hasNext(), equalTo(false));
- longValues.forEachValueInDoc(0, new LongValuesVerifierProc(0).addMissing());
- longValues.forEachValueInDoc(1, new LongValuesVerifierProc(1).addMissing());
- longValues.forEachValueInDoc(2, new LongValuesVerifierProc(2).addMissing());
-
// double values
DoubleValues doubleValues = fieldData.getDoubleValues();
|
a8924692252c329e7eaa03db07b03137b7b5ac22
|
intellij-community
|
class kind calculation may produce INRE- (SCL-1349)--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/source/com/intellij/ide/projectView/impl/nodes/ClassTreeNode.java b/source/com/intellij/ide/projectView/impl/nodes/ClassTreeNode.java
index e1cac5acc06e3..d9455271e6926 100644
--- a/source/com/intellij/ide/projectView/impl/nodes/ClassTreeNode.java
+++ b/source/com/intellij/ide/projectView/impl/nodes/ClassTreeNode.java
@@ -6,6 +6,7 @@
import com.intellij.ide.projectView.ViewSettings;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.openapi.project.Project;
+import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.ElementPresentationUtil;
@@ -108,15 +109,20 @@ public static int getClassPosition(final PsiClass aClass) {
if (aClass == null || !aClass.isValid()) {
return 0;
}
- int pos = ElementPresentationUtil.getClassKind(aClass);
- //abstract class before concrete
- if (pos == ElementPresentationUtil.CLASS_KIND_CLASS || pos == ElementPresentationUtil.CLASS_KIND_EXCEPTION) {
- boolean isAbstract = aClass.hasModifierProperty(PsiModifier.ABSTRACT) && !aClass.isInterface();
- if (isAbstract) {
- pos --;
+ try {
+ int pos = ElementPresentationUtil.getClassKind(aClass);
+ //abstract class before concrete
+ if (pos == ElementPresentationUtil.CLASS_KIND_CLASS || pos == ElementPresentationUtil.CLASS_KIND_EXCEPTION) {
+ boolean isAbstract = aClass.hasModifierProperty(PsiModifier.ABSTRACT) && !aClass.isInterface();
+ if (isAbstract) {
+ pos --;
+ }
}
+ return pos;
+ }
+ catch (IndexNotReadyException e) {
+ return 0;
}
- return pos;
}
private class ClassNameSortKey implements Comparable {
|
b6fcb0ba231e39b85e71c61ed5521fc67a2f98e8
|
drools
|
BZ975922 - GRE constraint operator list box- problems when re-opening file--
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java b/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java
index fdaca19222e..c6b56e247ca 100644
--- a/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java
+++ b/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceImpl.java
@@ -1730,8 +1730,8 @@ public RuleModel unmarshalUsingDSL( final String str,
dmo );
}
- private ExpandedDRLInfo parseDSLs( ExpandedDRLInfo expandedDRLInfo,
- String[] dsls ) {
+ private ExpandedDRLInfo parseDSLs( final ExpandedDRLInfo expandedDRLInfo,
+ final String[] dsls ) {
for ( String dsl : dsls ) {
for ( String line : dsl.split( "\n" ) ) {
String dslPattern = line.trim();
@@ -1751,7 +1751,7 @@ private ExpandedDRLInfo parseDSLs( ExpandedDRLInfo expandedDRLInfo,
return expandedDRLInfo;
}
- private String removeDslTopics( String line ) {
+ private String removeDslTopics( final String line ) {
int lastClosedSquare = -1;
boolean lookForOpen = true;
for ( int i = 0; i < line.length(); i++ ) {
@@ -1772,12 +1772,13 @@ private String removeDslTopics( String line ) {
return line.substring( lastClosedSquare + 1 );
}
- private String extractDslPattern( String line ) {
- return line.substring( 0, line.indexOf( '=' ) ).trim();
+ private String extractDslPattern( final String line ) {
+ return line.substring( 0,
+ line.indexOf( '=' ) ).trim();
}
- private RuleModel getRuleModel( ExpandedDRLInfo expandedDRLInfo,
- PackageDataModelOracle dmo ) {
+ private RuleModel getRuleModel( final ExpandedDRLInfo expandedDRLInfo,
+ final PackageDataModelOracle dmo ) {
//De-serialize model
RuleDescr ruleDescr = parseDrl( expandedDRLInfo );
RuleModel model = new RuleModel();
@@ -1787,7 +1788,8 @@ private RuleModel getRuleModel( ExpandedDRLInfo expandedDRLInfo,
Map<String, AnnotationDescr> annotations = ruleDescr.getAnnotations();
if ( annotations != null ) {
for ( AnnotationDescr annotation : annotations.values() ) {
- model.addMetadata( new RuleMetadata( annotation.getName(), annotation.getValuesAsString() ) );
+ model.addMetadata( new RuleMetadata( annotation.getName(),
+ annotation.getValuesAsString() ) );
}
}
@@ -1803,7 +1805,10 @@ private RuleModel getRuleModel( ExpandedDRLInfo expandedDRLInfo,
boolean isJavaDialect = parseAttributes( model,
ruleDescr.getAttributes() );
- Map<String, String> boundParams = parseLhs( model, ruleDescr.getLhs(), expandedDRLInfo, dmo );
+ Map<String, String> boundParams = parseLhs( model,
+ ruleDescr.getLhs(),
+ expandedDRLInfo,
+ dmo );
parseRhs( model,
expandedDRLInfo.consequence != null ? expandedDRLInfo.consequence : (String) ruleDescr.getConsequence(),
isJavaDialect,
@@ -1813,8 +1818,8 @@ private RuleModel getRuleModel( ExpandedDRLInfo expandedDRLInfo,
return model;
}
- private ExpandedDRLInfo preprocessDRL( String str,
- boolean hasDsl ) {
+ private ExpandedDRLInfo preprocessDRL( final String str,
+ final boolean hasDsl ) {
StringBuilder drl = new StringBuilder();
String thenLine = null;
List<String> lhsStatements = new ArrayList<String>();
@@ -1869,11 +1874,11 @@ private int parenthesisBalance( String str ) {
return balance;
}
- private ExpandedDRLInfo createExpandedDRLInfo( boolean hasDsl,
- StringBuilder drl,
- String thenLine,
- List<String> lhsStatements,
- List<String> rhsStatements ) {
+ private ExpandedDRLInfo createExpandedDRLInfo( final boolean hasDsl,
+ final StringBuilder drl,
+ final String thenLine,
+ final List<String> lhsStatements,
+ final List<String> rhsStatements ) {
if ( !hasDsl ) {
return processFreeFormStatement( drl,
thenLine,
@@ -1906,13 +1911,15 @@ private ExpandedDRLInfo createExpandedDRLInfo( boolean hasDsl,
lineCounter++;
String trimmed = statement.trim();
if ( trimmed.endsWith( "end" ) ) {
- trimmed = trimmed.substring( 0, trimmed.length() - 3 ).trim();
+ trimmed = trimmed.substring( 0,
+ trimmed.length() - 3 ).trim();
}
if ( trimmed.length() > 0 ) {
if ( hasDsl && trimmed.startsWith( ">" ) ) {
drl.append( trimmed.substring( 1 ) ).append( "\n" );
} else {
- expandedDRLInfo.dslStatementsInRhs.put( lineCounter, trimmed );
+ expandedDRLInfo.dslStatementsInRhs.put( lineCounter,
+ trimmed );
}
}
}
@@ -1922,10 +1929,10 @@ private ExpandedDRLInfo createExpandedDRLInfo( boolean hasDsl,
return expandedDRLInfo;
}
- private ExpandedDRLInfo processFreeFormStatement( StringBuilder drl,
- String thenLine,
- List<String> lhsStatements,
- List<String> rhsStatements ) {
+ private ExpandedDRLInfo processFreeFormStatement( final StringBuilder drl,
+ final String thenLine,
+ final List<String> lhsStatements,
+ final List<String> rhsStatements ) {
ExpandedDRLInfo expandedDRLInfo = new ExpandedDRLInfo( false );
int lineCounter = -1;
@@ -1959,7 +1966,7 @@ private ExpandedDRLInfo processFreeFormStatement( StringBuilder drl,
return expandedDRLInfo;
}
- private boolean isValidLHSStatement( String lhs ) {
+ private boolean isValidLHSStatement( final String lhs ) {
// TODO: How to identify a non valid (free form) lhs statement?
return ( lhs.indexOf( '(' ) >= 0 || lhs.indexOf( ':' ) >= 0 ) && lhs.indexOf( "//" ) == -1;
}
@@ -1982,7 +1989,7 @@ private static class ExpandedDRLInfo {
private Set<String> globals = new HashSet<String>();
- private ExpandedDRLInfo( boolean hasDsl ) {
+ private ExpandedDRLInfo( final boolean hasDsl ) {
this.hasDsl = hasDsl;
dslStatementsInLhs = new HashMap<Integer, String>();
dslStatementsInRhs = new HashMap<Integer, String>();
@@ -1991,12 +1998,12 @@ private ExpandedDRLInfo( boolean hasDsl ) {
rhsDslPatterns = new ArrayList<String>();
}
- public boolean hasGlobal( String name ) {
+ public boolean hasGlobal( final String name ) {
return globals.contains( name );
}
- public ExpandedDRLInfo registerGlobals( PackageDataModelOracle dmo,
- List<String> globalStatements ) {
+ public ExpandedDRLInfo registerGlobals( final PackageDataModelOracle dmo,
+ final List<String> globalStatements ) {
if ( globalStatements != null ) {
for ( String globalStatement : globalStatements ) {
String identifier = getIdentifier( globalStatement );
@@ -2023,12 +2030,13 @@ private String getIdentifier( String globalStatement ) {
}
String identifier = globalStatement.substring( lastSpace + 1 );
if ( identifier.endsWith( ";" ) ) {
- identifier = identifier.substring( 0, identifier.length() - 1 );
+ identifier = identifier.substring( 0,
+ identifier.length() - 1 );
}
return identifier;
}
- public ExpandedDRLInfo registerGlobalDescrs( List<GlobalDescr> globalDescrs ) {
+ public ExpandedDRLInfo registerGlobalDescrs( final List<GlobalDescr> globalDescrs ) {
if ( globalDescrs != null ) {
for ( GlobalDescr globalDescr : globalDescrs ) {
globals.add( globalDescr.getIdentifier() );
@@ -2038,7 +2046,7 @@ public ExpandedDRLInfo registerGlobalDescrs( List<GlobalDescr> globalDescrs ) {
}
}
- private RuleDescr parseDrl( ExpandedDRLInfo expandedDRLInfo ) {
+ private RuleDescr parseDrl( final ExpandedDRLInfo expandedDRLInfo ) {
DrlParser drlParser = new DrlParser();
PackageDescr packageDescr;
try {
@@ -2050,13 +2058,14 @@ private RuleDescr parseDrl( ExpandedDRLInfo expandedDRLInfo ) {
return packageDescr.getRules().get( 0 );
}
- private boolean parseAttributes( RuleModel m,
- Map<String, AttributeDescr> attributes ) {
+ private boolean parseAttributes( final RuleModel m,
+ final Map<String, AttributeDescr> attributes ) {
boolean isJavaDialect = false;
for ( Map.Entry<String, AttributeDescr> entry : attributes.entrySet() ) {
String name = entry.getKey();
String value = normalizeAttributeValue( entry.getValue().getValue().trim() );
- RuleAttribute ruleAttribute = new RuleAttribute( name, value );
+ RuleAttribute ruleAttribute = new RuleAttribute( name,
+ value );
m.addAttribute( ruleAttribute );
isJavaDialect |= name.equals( "dialect" ) && value.equals( "java" );
}
@@ -2086,47 +2095,59 @@ private String stripQuotes( String value ) {
return value;
}
- private Map<String, String> parseLhs( RuleModel m,
- AndDescr lhs,
- ExpandedDRLInfo expandedDRLInfo,
- PackageDataModelOracle dmo ) {
+ private Map<String, String> parseLhs( final RuleModel m,
+ final AndDescr lhs,
+ final ExpandedDRLInfo expandedDRLInfo,
+ final PackageDataModelOracle dmo ) {
Map<String, String> boundParams = new HashMap<String, String>();
int lineCounter = -1;
for ( BaseDescr descr : lhs.getDescrs() ) {
- lineCounter = parseNonDrlInLhs( m, expandedDRLInfo, lineCounter );
- IPattern pattern = parseBaseDescr( m, descr, boundParams, dmo );
+ lineCounter = parseNonDrlInLhs( m,
+ expandedDRLInfo,
+ lineCounter );
+ IPattern pattern = parseBaseDescr( m,
+ descr,
+ boundParams,
+ dmo );
if ( pattern != null ) {
m.addLhsItem( pattern );
}
}
- parseNonDrlInLhs( m, expandedDRLInfo, lineCounter );
+ parseNonDrlInLhs( m,
+ expandedDRLInfo,
+ lineCounter );
return boundParams;
}
- private int parseNonDrlInLhs( RuleModel m,
- ExpandedDRLInfo expandedDRLInfo,
+ private int parseNonDrlInLhs( final RuleModel m,
+ final ExpandedDRLInfo expandedDRLInfo,
int lineCounter ) {
lineCounter++;
- lineCounter = parseDslInLhs( m, expandedDRLInfo, lineCounter );
- lineCounter = parseFreeForm( m, expandedDRLInfo, lineCounter );
+ lineCounter = parseDslInLhs( m,
+ expandedDRLInfo,
+ lineCounter );
+ lineCounter = parseFreeForm( m,
+ expandedDRLInfo,
+ lineCounter );
return lineCounter;
}
- private int parseDslInLhs( RuleModel m,
- ExpandedDRLInfo expandedDRLInfo,
+ private int parseDslInLhs( final RuleModel m,
+ final ExpandedDRLInfo expandedDRLInfo,
int lineCounter ) {
if ( expandedDRLInfo.hasDsl ) {
String dslLine = expandedDRLInfo.dslStatementsInLhs.get( lineCounter );
while ( dslLine != null ) {
- m.addLhsItem( toDSLSentence( expandedDRLInfo.lhsDslPatterns, dslLine ) );
+ m.addLhsItem( toDSLSentence( expandedDRLInfo.lhsDslPatterns,
+ dslLine ) );
dslLine = expandedDRLInfo.dslStatementsInLhs.get( ++lineCounter );
}
}
return lineCounter;
}
- private int parseFreeForm( RuleModel m,
- ExpandedDRLInfo expandedDRLInfo,
+ private int parseFreeForm( final RuleModel m,
+ final ExpandedDRLInfo expandedDRLInfo,
int lineCounter ) {
String freeForm = expandedDRLInfo.freeFormStatementsInLhs.get( lineCounter );
while ( freeForm != null ) {
@@ -2138,39 +2159,55 @@ private int parseFreeForm( RuleModel m,
return lineCounter;
}
- private IPattern parseBaseDescr( RuleModel m,
- BaseDescr descr,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo ) {
+ private IPattern parseBaseDescr( final RuleModel m,
+ final BaseDescr descr,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo ) {
if ( descr instanceof PatternDescr ) {
- return parsePatternDescr( m, (PatternDescr) descr, boundParams, dmo );
+ return parsePatternDescr( m,
+ (PatternDescr) descr,
+ boundParams,
+ dmo );
} else if ( descr instanceof AndDescr ) {
AndDescr andDescr = (AndDescr) descr;
- return parseBaseDescr( m, andDescr.getDescrs().get( 0 ), boundParams, dmo );
+ return parseBaseDescr( m,
+ andDescr.getDescrs().get( 0 ),
+ boundParams,
+ dmo );
} else if ( descr instanceof EvalDescr ) {
FreeFormLine freeFormLine = new FreeFormLine();
freeFormLine.setText( "eval( " + ( (EvalDescr) descr ).getContent() + " )" );
return freeFormLine;
} else if ( descr instanceof ConditionalElementDescr ) {
- return parseExistentialElementDescr( m, (ConditionalElementDescr) descr, boundParams, dmo );
+ return parseExistentialElementDescr( m,
+ (ConditionalElementDescr) descr,
+ boundParams,
+ dmo );
}
return null;
}
- private IFactPattern parsePatternDescr( RuleModel m,
- PatternDescr pattern,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo ) {
+ private IFactPattern parsePatternDescr( final RuleModel m,
+ final PatternDescr pattern,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo ) {
if ( pattern.getSource() != null ) {
- return parsePatternSource( m, pattern, pattern.getSource(), boundParams, dmo );
- }
- return getFactPattern( m, pattern, boundParams, dmo );
- }
-
- private FactPattern getFactPattern( RuleModel m,
- PatternDescr pattern,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo ) {
+ return parsePatternSource( m,
+ pattern,
+ pattern.getSource(),
+ boundParams,
+ dmo );
+ }
+ return getFactPattern( m,
+ pattern,
+ boundParams,
+ dmo );
+ }
+
+ private FactPattern getFactPattern( final RuleModel m,
+ final PatternDescr pattern,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo ) {
String type = pattern.getObjectType();
FactPattern factPattern = new FactPattern( getSimpleFactType( type,
dmo ) );
@@ -2206,15 +2243,18 @@ private FactPattern getFactPattern( RuleModel m,
return factPattern;
}
- private IFactPattern parsePatternSource( RuleModel m,
- PatternDescr pattern,
- PatternSourceDescr patternSource,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo ) {
+ private IFactPattern parsePatternSource( final RuleModel m,
+ final PatternDescr pattern,
+ final PatternSourceDescr patternSource,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo ) {
if ( patternSource instanceof AccumulateDescr ) {
AccumulateDescr accumulate = (AccumulateDescr) patternSource;
FromAccumulateCompositeFactPattern fac = new FromAccumulateCompositeFactPattern();
- fac.setSourcePattern( parseBaseDescr( m, accumulate.getInput(), boundParams, dmo ) );
+ fac.setSourcePattern( parseBaseDescr( m,
+ accumulate.getInput(),
+ boundParams,
+ dmo ) );
FactPattern factPattern = new FactPattern( pattern.getObjectType() );
factPattern.setBoundName( pattern.getIdentifier() );
@@ -2286,8 +2326,12 @@ private IFactPattern parsePatternSource( RuleModel m,
String sourcePart = splitSource[ i ];
if ( i == 0 ) {
String type = boundParams.get( sourcePart );
- expression.appendPart( new ExpressionVariable( sourcePart, type, DataType.TYPE_NUMERIC ) );
- fields = findFields( dmo, m, type );
+ expression.appendPart( new ExpressionVariable( sourcePart,
+ type,
+ DataType.TYPE_NUMERIC ) );
+ fields = findFields( m,
+ dmo,
+ type );
} else {
ModelField modelField = null;
for ( ModelField field : fields ) {
@@ -2319,8 +2363,8 @@ private IFactPattern parsePatternSource( RuleModel m,
expression.appendPart( new ExpressionField( sourcePart,
modelField.getClassName(),
modelField.getType() ) );
- fields = findFields( dmo,
- m,
+ fields = findFields( m,
+ dmo,
modelField.getClassName() );
}
}
@@ -2331,49 +2375,63 @@ private IFactPattern parsePatternSource( RuleModel m,
throw new RuntimeException( "Unknown pattern source " + patternSource );
}
- private CompositeFactPattern parseExistentialElementDescr( RuleModel m,
- ConditionalElementDescr conditionalDescr,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo ) {
+ private CompositeFactPattern parseExistentialElementDescr( final RuleModel m,
+ final ConditionalElementDescr conditionalDescr,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo ) {
CompositeFactPattern comp = conditionalDescr instanceof NotDescr ?
new CompositeFactPattern( CompositeFactPattern.COMPOSITE_TYPE_NOT ) :
conditionalDescr instanceof OrDescr ?
new CompositeFactPattern( CompositeFactPattern.COMPOSITE_TYPE_OR ) :
new CompositeFactPattern( CompositeFactPattern.COMPOSITE_TYPE_EXISTS );
- addPatternToComposite( m, conditionalDescr, comp, boundParams, dmo );
+ addPatternToComposite( m,
+ conditionalDescr,
+ comp,
+ boundParams,
+ dmo );
IFactPattern[] patterns = comp.getPatterns();
return patterns != null && patterns.length > 0 ? comp : null;
}
- private void addPatternToComposite( RuleModel m,
- ConditionalElementDescr conditionalDescr,
- CompositeFactPattern comp,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo ) {
+ private void addPatternToComposite( final RuleModel m,
+ final ConditionalElementDescr conditionalDescr,
+ final CompositeFactPattern comp,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo ) {
for ( Object descr : conditionalDescr.getDescrs() ) {
if ( descr instanceof PatternDescr ) {
- comp.addFactPattern( parsePatternDescr( m, (PatternDescr) descr, boundParams, dmo ) );
+ comp.addFactPattern( parsePatternDescr( m,
+ (PatternDescr) descr,
+ boundParams,
+ dmo ) );
} else if ( descr instanceof ConditionalElementDescr ) {
- addPatternToComposite( m, (ConditionalElementDescr) descr, comp, boundParams, dmo );
+ addPatternToComposite( m,
+ (ConditionalElementDescr) descr,
+ comp,
+ boundParams,
+ dmo );
}
}
}
- private void parseConstraint( RuleModel m,
- FactPattern factPattern,
- ConditionalElementDescr constraint,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo ) {
+ private void parseConstraint( final RuleModel m,
+ final FactPattern factPattern,
+ final ConditionalElementDescr constraint,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo ) {
for ( BaseDescr descr : constraint.getDescrs() ) {
if ( descr instanceof ExprConstraintDescr ) {
ExprConstraintDescr exprConstraint = (ExprConstraintDescr) descr;
- Expr expr = parseExpr( exprConstraint.getExpression(), boundParams, dmo );
- factPattern.addConstraint( expr.asFieldConstraint( m, factPattern ) );
+ Expr expr = parseExpr( exprConstraint.getExpression(),
+ boundParams,
+ dmo );
+ factPattern.addConstraint( expr.asFieldConstraint( m,
+ factPattern ) );
}
}
}
- private static String findOperator( String expr ) {
+ private static String findOperator( final String expr ) {
final Set<String> potentialOperators = new HashSet<String>();
for ( Operator op : Operator.getAllOperators() ) {
String opString = op.getOperatorString();
@@ -2411,8 +2469,8 @@ private static String findOperator( String expr ) {
return null;
}
- private static boolean isInQuote( String expr,
- int pos ) {
+ private static boolean isInQuote( final String expr,
+ final int pos ) {
boolean isInQuote = false;
for ( int i = pos - 1; i >= 0; i-- ) {
if ( expr.charAt( i ) == '"' ) {
@@ -2424,7 +2482,7 @@ private static boolean isInQuote( String expr,
private static final String[] NULL_OPERATORS = new String[]{ "== null", "!= null" };
- private static String findNullOrNotNullOperator( String expr ) {
+ private static String findNullOrNotNullOperator( final String expr ) {
for ( String op : NULL_OPERATORS ) {
if ( expr.contains( op ) ) {
return op;
@@ -2433,12 +2491,12 @@ private static String findNullOrNotNullOperator( String expr ) {
return null;
}
- private void parseRhs( RuleModel m,
- String rhs,
- boolean isJavaDialect,
- Map<String, String> boundParams,
- ExpandedDRLInfo expandedDRLInfo,
- PackageDataModelOracle dmo ) {
+ private void parseRhs( final RuleModel m,
+ final String rhs,
+ final boolean isJavaDialect,
+ final Map<String, String> boundParams,
+ final ExpandedDRLInfo expandedDRLInfo,
+ final PackageDataModelOracle dmo ) {
PortableWorkDefinition pwd = null;
Map<String, List<String>> setStatements = new HashMap<String, List<String>>();
Map<String, Integer> setStatementsPosition = new HashMap<String, Integer>();
@@ -2454,7 +2512,8 @@ private void parseRhs( RuleModel m,
if ( expandedDRLInfo.hasDsl ) {
String dslLine = expandedDRLInfo.dslStatementsInRhs.get( lineCounter );
while ( dslLine != null ) {
- m.addRhsItem( toDSLSentence( expandedDRLInfo.rhsDslPatterns, dslLine ) );
+ m.addRhsItem( toDSLSentence( expandedDRLInfo.rhsDslPatterns,
+ dslLine ) );
dslLine = expandedDRLInfo.dslStatementsInRhs.get( ++lineCounter );
}
}
@@ -2463,19 +2522,20 @@ private void parseRhs( RuleModel m,
int modifyBlockEnd = line.lastIndexOf( '}' );
if ( modifiers == null ) {
modifiers = modifyBlockEnd > 0 ?
- line.substring( line.indexOf( '{' ) + 1, modifyBlockEnd ).trim() :
+ line.substring( line.indexOf( '{' ) + 1,
+ modifyBlockEnd ).trim() :
line.substring( line.indexOf( '{' ) + 1 ).trim();
} else if ( modifyBlockEnd != 0 ) {
modifiers += modifyBlockEnd > 0 ?
- line.substring( 0, modifyBlockEnd ).trim() :
+ line.substring( 0,
+ modifyBlockEnd ).trim() :
line;
}
if ( modifyBlockEnd >= 0 ) {
ActionUpdateField action = new ActionUpdateField();
action.setVariable( modifiedVariable );
m.addRhsItem( action );
- addModifiersToAction( modifiedVariable,
- modifiers,
+ addModifiersToAction( modifiers,
action,
boundParams,
dmo,
@@ -2486,7 +2546,8 @@ private void parseRhs( RuleModel m,
}
} else if ( line.startsWith( "insertLogical" ) ) {
String fact = unwrapParenthesis( line );
- String type = getStatementType( fact, factsType );
+ String type = getStatementType( fact,
+ factsType );
if ( type != null ) {
ActionInsertLogicalFact action = new ActionInsertLogicalFact( type );
m.addRhsItem( action );
@@ -2502,7 +2563,8 @@ private void parseRhs( RuleModel m,
}
} else if ( line.startsWith( "insert" ) ) {
String fact = unwrapParenthesis( line );
- String type = getStatementType( fact, factsType );
+ String type = getStatementType( fact,
+ factsType );
if ( type != null ) {
ActionInsertFact action = new ActionInsertFact( type );
m.addRhsItem( action );
@@ -2532,19 +2594,21 @@ private void parseRhs( RuleModel m,
} else if ( line.startsWith( "modify" ) ) {
int modifyBlockEnd = line.lastIndexOf( '}' );
if ( modifyBlockEnd > 0 ) {
- String variable = line.substring( line.indexOf( '(' ) + 1, line.indexOf( ')' ) ).trim();
+ String variable = line.substring( line.indexOf( '(' ) + 1,
+ line.indexOf( ')' ) ).trim();
ActionUpdateField action = new ActionUpdateField();
action.setVariable( variable );
m.addRhsItem( action );
- addModifiersToAction( variable,
- line.substring( line.indexOf( '{' ) + 1, modifyBlockEnd ).trim(),
+ addModifiersToAction( line.substring( line.indexOf( '{' ) + 1,
+ modifyBlockEnd ).trim(),
action,
boundParams,
dmo,
m.getImports(),
isJavaDialect );
} else {
- modifiedVariable = line.substring( line.indexOf( '(' ) + 1, line.indexOf( ')' ) ).trim();
+ modifiedVariable = line.substring( line.indexOf( '(' ) + 1,
+ line.indexOf( ')' ) ).trim();
int modifyBlockStart = line.indexOf( '{' );
if ( modifyBlockStart > 0 ) {
modifiers = line.substring( modifyBlockStart + 1 ).trim();
@@ -2563,35 +2627,49 @@ private void parseRhs( RuleModel m,
String statement = line.substring( "wiWorkItem.getParameters().put".length() );
statement = unwrapParenthesis( statement );
int commaPos = statement.indexOf( ',' );
- String name = statement.substring( 0, commaPos ).trim();
+ String name = statement.substring( 0,
+ commaPos ).trim();
String value = statement.substring( commaPos + 1 ).trim();
- pwd.addParameter( buildPortableParameterDefinition( name, value, boundParams ) );
+ pwd.addParameter( buildPortableParameterDefinition( name,
+ value,
+ boundParams ) );
} else if ( line.startsWith( "wim.internalExecuteWorkItem" ) || line.startsWith( "wiWorkItem.setName" ) ) {
// ignore
} else {
int dotPos = line.indexOf( '.' );
int argStart = line.indexOf( '(' );
if ( dotPos > 0 && argStart > dotPos ) {
- String variable = line.substring( 0, dotPos ).trim();
+ String variable = line.substring( 0,
+ dotPos ).trim();
if ( isJavaIdentifier( variable ) ) {
- String methodName = line.substring( dotPos + 1, argStart ).trim();
+ String methodName = line.substring( dotPos + 1,
+ argStart ).trim();
if ( isJavaIdentifier( methodName ) ) {
if ( getSettedField( methodName ) != null ) {
List<String> setters = setStatements.get( variable );
if ( setters == null ) {
setters = new ArrayList<String>();
- setStatements.put( variable, setters );
+ setStatements.put( variable,
+ setters );
}
- setStatementsPosition.put( variable, lineCounter );
+ setStatementsPosition.put( variable,
+ lineCounter );
setters.add( line );
} else if ( methodName.equals( "add" ) && expandedDRLInfo.hasGlobal( variable ) ) {
- String factName = line.substring( argStart + 1, line.lastIndexOf( ')' ) ).trim();
+ String factName = line.substring( argStart + 1,
+ line.lastIndexOf( ')' ) ).trim();
ActionGlobalCollectionAdd actionGlobalCollectionAdd = new ActionGlobalCollectionAdd();
actionGlobalCollectionAdd.setGlobalName( variable );
actionGlobalCollectionAdd.setFactName( factName );
m.addRhsItem( actionGlobalCollectionAdd );
} else {
- m.addRhsItem( getActionCallMethod( m, isJavaDialect, boundParams, dmo, line, variable, methodName ) );
+ m.addRhsItem( getActionCallMethod( m,
+ isJavaDialect,
+ boundParams,
+ dmo,
+ line,
+ variable,
+ methodName ) );
}
continue;
}
@@ -2601,14 +2679,18 @@ private void parseRhs( RuleModel m,
int eqPos = line.indexOf( '=' );
boolean addFreeFormLine = line.trim().length() > 0;
if ( eqPos > 0 ) {
- String field = line.substring( 0, eqPos ).trim();
+ String field = line.substring( 0,
+ eqPos ).trim();
if ( "java.text.SimpleDateFormat sdf".equals( field ) || "org.drools.core.process.instance.WorkItemManager wim".equals( field ) ) {
addFreeFormLine = false;
}
String[] split = field.split( " " );
if ( split.length == 2 ) {
- factsType.put( split[ 1 ], split[ 0 ] );
- addFreeFormLine &= !isInsertedFact( lines, lineCounter, split[ 1 ] );
+ factsType.put( split[ 1 ],
+ split[ 0 ] );
+ addFreeFormLine &= !isInsertedFact( lines,
+ lineCounter,
+ split[ 1 ] );
}
}
if ( addFreeFormLine ) {
@@ -2627,42 +2709,39 @@ private void parseRhs( RuleModel m,
dmo,
m.getImports(),
isJavaDialect );
- m.addRhsItem( action, setStatementsPosition.get( entry.getKey() ) );
+ m.addRhsItem( action,
+ setStatementsPosition.get( entry.getKey() ) );
}
if ( expandedDRLInfo.hasDsl ) {
String dslLine = expandedDRLInfo.dslStatementsInRhs.get( ++lineCounter );
while ( dslLine != null ) {
- m.addRhsItem( toDSLSentence( expandedDRLInfo.rhsDslPatterns, dslLine ) );
+ m.addRhsItem( toDSLSentence( expandedDRLInfo.rhsDslPatterns,
+ dslLine ) );
dslLine = expandedDRLInfo.dslStatementsInRhs.get( ++lineCounter );
}
}
}
- private ActionCallMethod getActionCallMethod(
- RuleModel model,
- boolean isJavaDialect,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo,
- String line,
- String variable,
- String methodName ) {
+ private ActionCallMethod getActionCallMethod( final RuleModel model,
+ final boolean isJavaDialect,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo,
+ final String line,
+ final String variable,
+ final String methodName ) {
- return new ActionCallMethodBuilder(
- model,
- dmo,
- isJavaDialect,
- boundParams
- ).get(
- variable,
- methodName,
- unwrapParenthesis( line ).split( "," )
- );
+ return new ActionCallMethodBuilder( model,
+ dmo,
+ isJavaDialect,
+ boundParams ).get( variable,
+ methodName,
+ unwrapParenthesis( line ).split( "," ) );
}
- private boolean isInsertedFact( String[] lines,
- int lineCounter,
- String fact ) {
+ private boolean isInsertedFact( final String[] lines,
+ final int lineCounter,
+ final String fact ) {
for ( int i = lineCounter; i < lines.length; i++ ) {
String line = lines[ i ].trim();
if ( line.startsWith( "insert" ) ) {
@@ -2674,17 +2753,20 @@ private boolean isInsertedFact( String[] lines,
return false;
}
- private DSLSentence toDSLSentence( List<String> dslPatterns,
- String dslLine ) {
+ private DSLSentence toDSLSentence( final List<String> dslPatterns,
+ final String dslLine ) {
DSLSentence dslSentence = new DSLSentence();
for ( String dslPattern : dslPatterns ) {
// Dollar breaks the matcher, need to escape them.
- dslPattern = dslPattern.replace( "$", "\\$" );
+ dslPattern = dslPattern.replace( "$",
+ "\\$" );
//A DSL Pattern can contain Regex itself, for example "When the ages is less than {num:1?[0-9]?[0-9]}"
- String regex = dslPattern.replaceAll( "\\{.*?\\}", "(.*)" );
+ String regex = dslPattern.replaceAll( "\\{.*?\\}",
+ "(.*)" );
Matcher matcher = Pattern.compile( regex ).matcher( dslLine );
if ( matcher.matches() ) {
- dslPattern = dslPattern.replace( "\\$", "$" );
+ dslPattern = dslPattern.replace( "\\$",
+ "$" );
dslSentence.setDefinition( dslPattern );
for ( int i = 0; i < matcher.groupCount(); i++ ) {
dslSentence.getValues().get( i ).setValue( matcher.group( i + 1 ) );
@@ -2696,9 +2778,9 @@ private DSLSentence toDSLSentence( List<String> dslPatterns,
return dslSentence;
}
- private PortableParameterDefinition buildPortableParameterDefinition( String name,
- String value,
- Map<String, String> boundParams ) {
+ private PortableParameterDefinition buildPortableParameterDefinition( final String name,
+ final String value,
+ final Map<String, String> boundParams ) {
PortableParameterDefinition paramDef;
String type = boundParams.get( value );
if ( type != null ) {
@@ -2720,7 +2802,8 @@ private PortableParameterDefinition buildPortableParameterDefinition( String nam
( (PortableBooleanParameterDefinition) paramDef ).setValue( b );
} else if ( value.startsWith( "\"" ) ) {
paramDef = new PortableStringParameterDefinition();
- ( (PortableStringParameterDefinition) paramDef ).setValue( value.substring( 1, value.length() - 1 ) );
+ ( (PortableStringParameterDefinition) paramDef ).setValue( value.substring( 1,
+ value.length() - 1 ) );
} else if ( Character.isDigit( value.charAt( 0 ) ) ) {
if ( value.endsWith( "f" ) ) {
paramDef = new PortableFloatParameterDefinition();
@@ -2732,17 +2815,18 @@ private PortableParameterDefinition buildPortableParameterDefinition( String nam
} else {
throw new RuntimeException( "Unknown parameter " + value );
}
- paramDef.setName( name.substring( 1, name.length() - 1 ) );
+ paramDef.setName( name.substring( 1,
+ name.length() - 1 ) );
return paramDef;
}
- private void addSettersToAction( Map<String, List<String>> setStatements,
- String variable,
- ActionFieldList action,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo,
- Imports imports,
- boolean isJavaDialect ) {
+ private void addSettersToAction( final Map<String, List<String>> setStatements,
+ final String variable,
+ final ActionFieldList action,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo,
+ final Imports imports,
+ final boolean isJavaDialect ) {
addSettersToAction( setStatements.remove( variable ),
action,
boundParams,
@@ -2751,43 +2835,56 @@ private void addSettersToAction( Map<String, List<String>> setStatements,
isJavaDialect );
}
- private void addSettersToAction( List<String> setters,
- ActionFieldList action,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo,
- Imports imports,
- boolean isJavaDialect ) {
+ private void addSettersToAction( final List<String> setters,
+ final ActionFieldList action,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo,
+ final Imports imports,
+ final boolean isJavaDialect ) {
if ( setters != null ) {
for ( String statement : setters ) {
int dotPos = statement.indexOf( '.' );
int argStart = statement.indexOf( '(' );
- String methodName = statement.substring( dotPos + 1, argStart ).trim();
- addSetterToAction( action, boundParams, dmo, imports, isJavaDialect, statement, methodName );
+ String methodName = statement.substring( dotPos + 1,
+ argStart ).trim();
+ addSetterToAction( action,
+ boundParams,
+ dmo,
+ imports,
+ isJavaDialect,
+ statement,
+ methodName );
}
}
}
- private void addModifiersToAction( String variable,
- String modifiers,
- ActionFieldList action,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo,
- Imports imports,
- boolean isJavaDialect ) {
+ private void addModifiersToAction( final String modifiers,
+ final ActionFieldList action,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo,
+ final Imports imports,
+ final boolean isJavaDialect ) {
for ( String statement : splitArgumentsList( modifiers ) ) {
int argStart = statement.indexOf( '(' );
- String methodName = statement.substring( 0, argStart ).trim();
- addSetterToAction( action, boundParams, dmo, imports, isJavaDialect, statement, methodName );
- }
- }
-
- private void addSetterToAction( ActionFieldList action,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo,
- Imports imports,
- boolean isJavaDialect,
- String statement,
- String methodName ) {
+ String methodName = statement.substring( 0,
+ argStart ).trim();
+ addSetterToAction( action,
+ boundParams,
+ dmo,
+ imports,
+ isJavaDialect,
+ statement,
+ methodName );
+ }
+ }
+
+ private void addSetterToAction( final ActionFieldList action,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo,
+ final Imports imports,
+ final boolean isJavaDialect,
+ final String statement,
+ final String methodName ) {
String field = getSettedField( methodName );
String value = unwrapParenthesis( statement );
String dataType = inferDataType( action,
@@ -2804,27 +2901,41 @@ private void addSetterToAction( ActionFieldList action,
field,
value,
dataType,
- boundParams,
- dmo ) );
+ boundParams ) );
}
- private ActionFieldValue buildFieldValue( boolean isJavaDialect,
+ private ActionFieldValue buildFieldValue( final boolean isJavaDialect,
String field,
- String value,
- String dataType,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo ) {
+ final String value,
+ final String dataType,
+ final Map<String, String> boundParams ) {
if ( value.contains( "wiWorkItem.getResult" ) ) {
field = field.substring( 0, 1 ).toUpperCase() + field.substring( 1 );
String wiParam = field.substring( "Results".length() );
if ( wiParam.equals( "BooleanResult" ) ) {
- return new ActionWorkItemFieldValue( field, DataType.TYPE_BOOLEAN, "WorkItem", wiParam, Boolean.class.getName() );
+ return new ActionWorkItemFieldValue( field,
+ DataType.TYPE_BOOLEAN,
+ "WorkItem",
+ wiParam,
+ Boolean.class.getName() );
} else if ( wiParam.equals( "StringResult" ) ) {
- return new ActionWorkItemFieldValue( field, DataType.TYPE_STRING, "WorkItem", wiParam, String.class.getName() );
+ return new ActionWorkItemFieldValue( field,
+ DataType.TYPE_STRING,
+ "WorkItem",
+ wiParam,
+ String.class.getName() );
} else if ( wiParam.equals( "IntegerResult" ) ) {
- return new ActionWorkItemFieldValue( field, DataType.TYPE_NUMERIC_INTEGER, "WorkItem", wiParam, Integer.class.getName() );
+ return new ActionWorkItemFieldValue( field,
+ DataType.TYPE_NUMERIC_INTEGER,
+ "WorkItem",
+ wiParam,
+ Integer.class.getName() );
} else if ( wiParam.equals( "FloatResult" ) ) {
- return new ActionWorkItemFieldValue( field, DataType.TYPE_NUMERIC_FLOAT, "WorkItem", wiParam, Float.class.getName() );
+ return new ActionWorkItemFieldValue( field,
+ DataType.TYPE_NUMERIC_FLOAT,
+ "WorkItem",
+ wiParam,
+ Float.class.getName() );
}
}
ActionFieldValue fieldValue = new ActionFieldValue( field,
@@ -2840,7 +2951,7 @@ private ActionFieldValue buildFieldValue( boolean isJavaDialect,
return fieldValue;
}
- private boolean isJavaIdentifier( String name ) {
+ private boolean isJavaIdentifier( final String name ) {
if ( name == null || name.length() == 0 || !Character.isJavaIdentifierStart( name.charAt( 0 ) ) ) {
return false;
}
@@ -2852,11 +2963,12 @@ private boolean isJavaIdentifier( String name ) {
return true;
}
- private String getSettedField( String methodName ) {
+ private String getSettedField( final String methodName ) {
if ( methodName.length() > 3 && methodName.startsWith( "set" ) ) {
String field = methodName.substring( 3 );
if ( Character.isUpperCase( field.charAt( 0 ) ) ) {
- return field.substring( 0, 1 ).toLowerCase() + field.substring( 1 );
+ return field.substring( 0,
+ 1 ).toLowerCase() + field.substring( 1 );
} else {
return field;
}
@@ -2864,13 +2976,14 @@ private String getSettedField( String methodName ) {
return null;
}
- private String getStatementType( String fact,
- Map<String, String> factsType ) {
+ private String getStatementType( final String fact,
+ final Map<String, String> factsType ) {
String type = null;
if ( fact.startsWith( "new " ) ) {
String inserted = fact.substring( 4 ).trim();
if ( inserted.endsWith( "()" ) ) {
- type = inserted.substring( 0, inserted.length() - 2 ).trim();
+ type = inserted.substring( 0,
+ inserted.length() - 2 ).trim();
}
} else {
type = factsType.get( fact );
@@ -2878,23 +2991,29 @@ private String getStatementType( String fact,
return type;
}
- private Expr parseExpr( String expr,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo ) {
+ private Expr parseExpr( final String expr,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo ) {
List<String> splittedExpr = splitExpression( expr );
if ( splittedExpr.size() == 1 ) {
String singleExpr = splittedExpr.get( 0 );
if ( singleExpr.startsWith( "(" ) ) {
- return parseExpr( singleExpr.substring( 1 ), boundParams, dmo );
+ return parseExpr( singleExpr.substring( 1 ),
+ boundParams,
+ dmo );
} else if ( singleExpr.startsWith( "eval" ) ) {
return new EvalExpr( unwrapParenthesis( singleExpr ) );
} else {
- return new SimpleExpr( singleExpr, boundParams, dmo );
+ return new SimpleExpr( singleExpr,
+ boundParams,
+ dmo );
}
}
ComplexExpr complexExpr = new ComplexExpr( splittedExpr.get( 1 ) );
for ( int i = 0; i < splittedExpr.size(); i += 2 ) {
- complexExpr.subExprs.add( parseExpr( splittedExpr.get( i ), boundParams, dmo ) );
+ complexExpr.subExprs.add( parseExpr( splittedExpr.get( i ),
+ boundParams,
+ dmo ) );
}
return complexExpr;
}
@@ -2903,7 +3022,7 @@ private enum SplitterState {
START, EXPR, PIPE, OR, AMPERSAND, AND, NESTED
}
- private List<String> splitExpression( String expr ) {
+ private List<String> splitExpression( final String expr ) {
List<String> splittedExpr = new ArrayList<String>();
int nestingLevel = 0;
SplitterState status = SplitterState.START;
@@ -3003,8 +3122,8 @@ private List<String> splitExpression( String expr ) {
private interface Expr {
- FieldConstraint asFieldConstraint( RuleModel m,
- FactPattern factPattern );
+ FieldConstraint asFieldConstraint( final RuleModel m,
+ final FactPattern factPattern );
}
private static class SimpleExpr implements Expr {
@@ -3013,16 +3132,16 @@ private static class SimpleExpr implements Expr {
private final Map<String, String> boundParams;
private final PackageDataModelOracle dmo;
- private SimpleExpr( String expr,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo ) {
+ private SimpleExpr( final String expr,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo ) {
this.expr = expr;
this.boundParams = boundParams;
this.dmo = dmo;
}
- public FieldConstraint asFieldConstraint( RuleModel m,
- FactPattern factPattern ) {
+ public FieldConstraint asFieldConstraint( final RuleModel m,
+ final FactPattern factPattern ) {
String fieldName = expr;
String value = null;
@@ -3042,17 +3161,18 @@ public FieldConstraint asFieldConstraint( RuleModel m,
}
}
+ boolean isExpression = fieldName.contains( "." ) || fieldName.endsWith( "()" );
return createFieldConstraint( m,
factPattern,
fieldName,
value,
operator,
- fieldName.contains( "." ) );
+ isExpression );
}
- private SingleFieldConstraint createNullCheckFieldConstraint( RuleModel m,
- FactPattern factPattern,
- String fieldName ) {
+ private SingleFieldConstraint createNullCheckFieldConstraint( final RuleModel m,
+ final FactPattern factPattern,
+ final String fieldName ) {
return createFieldConstraint( m,
factPattern,
fieldName,
@@ -3061,16 +3181,17 @@ private SingleFieldConstraint createNullCheckFieldConstraint( RuleModel m,
true );
}
- private SingleFieldConstraint createFieldConstraint( RuleModel m,
- FactPattern factPattern,
- String fieldName,
+ private SingleFieldConstraint createFieldConstraint( final RuleModel m,
+ final FactPattern factPattern,
+ final String fieldName,
String value,
- String operator,
- boolean isExpression ) {
+ final String operator,
+ final boolean isExpression ) {
String operatorParams = null;
if ( value != null && value.startsWith( "[" ) ) {
int endSquare = value.indexOf( ']' );
- operatorParams = value.substring( 1, endSquare ).trim();
+ operatorParams = value.substring( 1,
+ endSquare ).trim();
value = value.substring( endSquare + 1 ).trim();
}
@@ -3089,11 +3210,13 @@ private SingleFieldConstraint createFieldConstraint( RuleModel m,
if ( operatorParams != null ) {
int i = 0;
for ( String param : operatorParams.split( "," ) ) {
- ( (BaseSingleFieldConstraint) fieldConstraint ).setParameter( "" + i++, param.trim() );
+ fieldConstraint.setParameter( "" + i++,
+ param.trim() );
}
- ( (BaseSingleFieldConstraint) fieldConstraint ).setParameter( "org.drools.workbench.models.commons.backend.rule.visibleParameterSet", "" + i );
- ( (BaseSingleFieldConstraint) fieldConstraint ).setParameter( "org.drools.workbench.models.commons.backend.rule.operatorParameterGenerator",
- "org.drools.workbench.models.commons.backend.rule.CEPOperatorParameterDRLBuilder" );
+ fieldConstraint.setParameter( "org.drools.workbench.models.commons.backend.rule.visibleParameterSet",
+ "" + i );
+ fieldConstraint.setParameter( "org.drools.workbench.models.commons.backend.rule.operatorParameterGenerator",
+ "org.drools.workbench.models.commons.backend.rule.CEPOperatorParameterDRLBuilder" );
}
if ( fieldName.equals( "this" ) && ( operator == null || operator.equals( "!= null" ) ) ) {
@@ -3101,7 +3224,9 @@ private SingleFieldConstraint createFieldConstraint( RuleModel m,
}
fieldConstraint.setFactType( factPattern.getFactType() );
- ModelField field = findField( findFields( dmo, m, factPattern.getFactType() ),
+ ModelField field = findField( findFields( m,
+ dmo,
+ factPattern.getFactType() ),
fieldConstraint.getFieldName() );
if ( field != null && ( fieldConstraint.getFieldType() == null || fieldConstraint.getFieldType().trim().length() == 0 ) ) {
@@ -3110,11 +3235,11 @@ private SingleFieldConstraint createFieldConstraint( RuleModel m,
return fieldConstraint;
}
- private SingleFieldConstraint createExpressionBuilderConstraint( RuleModel m,
- FactPattern factPattern,
- String fieldName,
- String operator,
- String value ) {
+ private SingleFieldConstraint createExpressionBuilderConstraint( final RuleModel m,
+ final FactPattern factPattern,
+ final String fieldName,
+ final String operator,
+ final String value ) {
// TODO: we should find a way to know when the expression uses a getter and in this case create a plain SingleFieldConstraint
//int dotPos = fieldName.lastIndexOf('.');
//SingleFieldConstraint con = createSingleFieldConstraint(dotPos > 0 ? fieldName.substring(dotPos+1) : fieldName, operator, value);
@@ -3125,6 +3250,28 @@ private SingleFieldConstraint createExpressionBuilderConstraint( RuleModel m,
operator,
value );
+ return con;
+ }
+
+ private SingleFieldConstraint createSingleFieldConstraint( final RuleModel m,
+ final FactPattern factPattern,
+ String fieldName,
+ final String operator,
+ final String value ) {
+ SingleFieldConstraint con = new SingleFieldConstraint();
+ fieldName = setFieldBindingOnContraint( factPattern.getFactType(),
+ fieldName,
+ m,
+ con,
+ boundParams );
+ con.setFieldName( fieldName );
+ setOperatorAndValueOnConstraint( m,
+ operator,
+ value,
+ factPattern,
+ con );
+
+ //Setup parent relationships for SingleFieldConstraints
for ( FieldConstraint fieldConstraint : factPattern.getFieldConstraints() ) {
if ( fieldConstraint instanceof SingleFieldConstraint ) {
SingleFieldConstraint sfc = (SingleFieldConstraint) fieldConstraint;
@@ -3138,33 +3285,18 @@ private SingleFieldConstraint createExpressionBuilderConstraint( RuleModel m,
}
}
- if ( con.getParent() == null && !( con instanceof SingleFieldConstraintEBLeftSide ) ) {
+ if ( con.getParent() == null ) {
con.setParent( createParentFor( m, factPattern, fieldName ) );
}
return con;
}
- private SingleFieldConstraint createSingleFieldConstraint( RuleModel m,
- FactPattern factPattern,
- String fieldName,
- String operator,
- String value ) {
- SingleFieldConstraint con = new SingleFieldConstraint();
- fieldName = setFieldBindingOnContraint( factPattern.getFactType(),
- fieldName,
- m, con,
- boundParams );
- con.setFieldName( fieldName );
- setOperatorAndValueOnConstraint( m, operator, value, factPattern, con );
- return con;
- }
-
- private SingleFieldConstraintEBLeftSide createSingleFieldConstraintEBLeftSide( RuleModel m,
- FactPattern factPattern,
+ private SingleFieldConstraintEBLeftSide createSingleFieldConstraintEBLeftSide( final RuleModel m,
+ final FactPattern factPattern,
String fieldName,
- String operator,
- String value ) {
+ final String operator,
+ final String value ) {
SingleFieldConstraintEBLeftSide con = new SingleFieldConstraintEBLeftSide();
fieldName = setFieldBindingOnContraint( factPattern.getFactType(),
@@ -3172,10 +3304,14 @@ private SingleFieldConstraintEBLeftSide createSingleFieldConstraintEBLeftSide( R
m,
con,
boundParams );
- String classType = getFQFactType( m, factPattern.getFactType() );
+ String classType = getFQFactType( m,
+ factPattern.getFactType() );
con.getExpressionLeftSide().appendPart( new ExpressionUnboundFact( factPattern ) );
- parseExpression( m, classType, fieldName, con.getExpressionLeftSide() );
+ parseExpression( m,
+ classType,
+ fieldName,
+ con.getExpressionLeftSide() );
setOperatorAndValueOnConstraint( m,
operator,
@@ -3186,10 +3322,10 @@ private SingleFieldConstraintEBLeftSide createSingleFieldConstraintEBLeftSide( R
return con;
}
- private ExpressionFormLine parseExpression( RuleModel m,
+ private ExpressionFormLine parseExpression( final RuleModel m,
String factType,
- String fieldName,
- ExpressionFormLine expression ) {
+ final String fieldName,
+ final ExpressionFormLine expression ) {
String[] splits = fieldName.split( "\\." );
boolean isBoundParam = false;
@@ -3199,9 +3335,13 @@ private ExpressionFormLine parseExpression( RuleModel m,
isBoundParam = true;
}
- ModelField[] typeFields = findFields( dmo,
- m,
+ //An ExpressionPart can be a Field or a Method
+ ModelField[] typeFields = findFields( m,
+ dmo,
factType );
+ List<MethodInfo> methodInfos = getMethodInfosForType( m,
+ dmo,
+ factType );
//Handle all but last expression part
for ( int i = 0; i < splits.length - 1; i++ ) {
@@ -3210,6 +3350,7 @@ private ExpressionFormLine parseExpression( RuleModel m,
expression.appendPart( new ExpressionField( expressionPart,
factType,
DataType.TYPE_THIS ) );
+
} else if ( isBoundParam ) {
ModelField currentFact = findFact( dmo.getProjectModelFields(),
factType );
@@ -3217,18 +3358,35 @@ private ExpressionFormLine parseExpression( RuleModel m,
currentFact.getClassName(),
currentFact.getType() ) );
isBoundParam = false;
+
} else {
+ //An ExpressionPart can be a Field or a Method
+ String currentClassName = null;
ModelField currentField = findField( typeFields,
expressionPart );
+ if ( currentField != null ) {
+ currentClassName = currentField.getClassName();
+ }
+ MethodInfo currentMethodInfo = findMethodInfo( methodInfos,
+ expressionPart );
+ if ( currentMethodInfo != null ) {
+ currentClassName = currentMethodInfo.getReturnClassType();
+ }
- processExpressionPart( factType,
+ processExpressionPart( m,
+ factType,
currentField,
+ currentMethodInfo,
expression,
expressionPart );
- typeFields = findFields( dmo,
- m,
- currentField.getClassName() );
+ //Refresh field and method information based
+ typeFields = findFields( m,
+ dmo,
+ currentClassName );
+ methodInfos = getMethodInfosForType( m,
+ dmo,
+ currentClassName );
}
}
@@ -3236,9 +3394,13 @@ private ExpressionFormLine parseExpression( RuleModel m,
String expressionPart = normalizeExpressionPart( splits[ splits.length - 1 ] );
ModelField currentField = findField( typeFields,
expressionPart );
+ MethodInfo currentMethodInfo = findMethodInfo( methodInfos,
+ expressionPart );
- processExpressionPart( factType,
+ processExpressionPart( m,
+ factType,
currentField,
+ currentMethodInfo,
expression,
expressionPart );
@@ -3248,41 +3410,34 @@ private ExpressionFormLine parseExpression( RuleModel m,
private String normalizeExpressionPart( String expressionPart ) {
int parenthesisPos = expressionPart.indexOf( '(' );
if ( parenthesisPos > 0 ) {
- expressionPart = expressionPart.substring( 0, parenthesisPos );
+ expressionPart = expressionPart.substring( 0,
+ parenthesisPos );
}
return expressionPart.trim();
}
- private void processExpressionPart( final String factType,
+ private void processExpressionPart( final RuleModel m,
+ final String factType,
final ModelField currentField,
+ final MethodInfo currentMethodInfo,
final ExpressionFormLine expression,
final String expressionPart ) {
if ( currentField == null ) {
- final String previousClassName = expression.getClassType();
- final List<MethodInfo> mis = dmo.getProjectMethodInformation().get( previousClassName );
- boolean isMethod = false;
- if ( mis != null ) {
- for ( MethodInfo mi : mis ) {
- if ( mi.getName().equals( expressionPart ) ) {
- expression.appendPart( new ExpressionMethod( mi.getName(),
- mi.getReturnClassType(),
- mi.getGenericType(),
- mi.getParametricReturnType() ) );
- isMethod = true;
- break;
- }
- }
- }
- if ( isMethod == false ) {
+ boolean isMethod = currentMethodInfo != null;
+ if ( isMethod ) {
+ expression.appendPart( new ExpressionMethod( currentMethodInfo.getName(),
+ currentMethodInfo.getReturnClassType(),
+ currentMethodInfo.getGenericType(),
+ currentMethodInfo.getParametricReturnType() ) );
+ } else {
expression.appendPart( new ExpressionText( expressionPart ) );
}
} else if ( "Collection".equals( currentField.getType() ) ) {
- expression.appendPart(
- new ExpressionCollection( expressionPart,
- currentField.getClassName(),
- currentField.getType(),
- dmo.getProjectFieldParametersType().get( factType + "#" + expressionPart ) )
+ expression.appendPart( new ExpressionCollection( expressionPart,
+ currentField.getClassName(),
+ currentField.getType(),
+ dmo.getProjectFieldParametersType().get( factType + "#" + expressionPart ) )
);
} else {
expression.appendPart( new ExpressionField( expressionPart,
@@ -3292,8 +3447,8 @@ private void processExpressionPart( final String factType,
}
- private String getFQFactType( RuleModel ruleModel,
- String factType ) {
+ private String getFQFactType( final RuleModel ruleModel,
+ final String factType ) {
Set<String> factTypes = dmo.getProjectModelFields().keySet();
@@ -3316,8 +3471,8 @@ private String getFQFactType( RuleModel ruleModel,
return factType;
}
- private ModelField findFact( Map<String, ModelField[]> modelFields,
- String factType ) {
+ private ModelField findFact( final Map<String, ModelField[]> modelFields,
+ final String factType ) {
final ModelField[] typeFields = modelFields.get( factType );
if ( typeFields == null ) {
return null;
@@ -3330,35 +3485,41 @@ private ModelField findFact( Map<String, ModelField[]> modelFields,
return null;
}
- private SingleFieldConstraint createParentFor( RuleModel m,
- FactPattern factPattern,
- String fieldName ) {
+ private SingleFieldConstraint createParentFor( final RuleModel m,
+ final FactPattern factPattern,
+ final String fieldName ) {
int dotPos = fieldName.lastIndexOf( '.' );
if ( dotPos > 0 ) {
- SingleFieldConstraint constraint = createNullCheckFieldConstraint( m, factPattern, fieldName.substring( 0, dotPos ) );
+ SingleFieldConstraint constraint = createNullCheckFieldConstraint( m,
+ factPattern,
+ fieldName.substring( 0,
+ dotPos ) );
factPattern.addConstraint( constraint );
return constraint;
}
return null;
}
- private String setFieldBindingOnContraint(
- String factType,
- String fieldName,
- RuleModel model,
- SingleFieldConstraint con,
- Map<String, String> boundParams ) {
+ private String setFieldBindingOnContraint( final String factType,
+ String fieldName,
+ final RuleModel model,
+ final SingleFieldConstraint con,
+ final Map<String, String> boundParams ) {
int colonPos = fieldName.indexOf( ':' );
if ( colonPos > 0 ) {
- String fieldBinding = fieldName.substring( 0, colonPos ).trim();
+ String fieldBinding = fieldName.substring( 0,
+ colonPos ).trim();
con.setFieldBinding( fieldBinding );
fieldName = fieldName.substring( colonPos + 1 ).trim();
- ModelField[] fields = findFields( dmo, model, factType );
+ ModelField[] fields = findFields( model,
+ dmo,
+ factType );
if ( fields != null ) {
for ( ModelField field : fields ) {
if ( field.getName().equals( fieldName ) ) {
- boundParams.put( fieldBinding, field.getType() );
+ boundParams.put( fieldBinding,
+ field.getType() );
}
}
}
@@ -3367,11 +3528,11 @@ private String setFieldBindingOnContraint(
return fieldName;
}
- private String setOperatorAndValueOnConstraint( RuleModel m,
- String operator,
- String value,
- FactPattern factPattern,
- SingleFieldConstraint con ) {
+ private String setOperatorAndValueOnConstraint( final RuleModel m,
+ final String operator,
+ final String value,
+ final FactPattern factPattern,
+ final SingleFieldConstraint con ) {
con.setOperator( operator );
String type = null;
boolean isAnd = false;
@@ -3379,7 +3540,11 @@ private String setOperatorAndValueOnConstraint( RuleModel m,
if ( value != null ) {
isAnd = value.contains( "&&" );
splittedValue = isAnd ? value.split( "\\&\\&" ) : value.split( "\\|\\|" );
- type = setValueOnConstraint( m, operator, factPattern, con, splittedValue[ 0 ].trim() );
+ type = setValueOnConstraint( m,
+ operator,
+ factPattern,
+ con,
+ splittedValue[ 0 ].trim() );
}
if ( splittedValue.length > 1 ) {
@@ -3391,23 +3556,28 @@ private String setOperatorAndValueOnConstraint( RuleModel m,
connectiveConstraints[ i ] = new ConnectiveConstraint();
connectiveConstraints[ i ].setOperator( ( isAnd ? "&& " : "|| " ) + connectiveOperator );
- setValueOnConstraint( m, operator, factPattern, connectiveConstraints[ i ], connectiveValue );
+ setValueOnConstraint( m,
+ operator,
+ factPattern,
+ connectiveConstraints[ i ],
+ connectiveValue );
}
con.setConnectives( connectiveConstraints );
}
return type;
}
- private String setValueOnConstraint( RuleModel m,
- String operator,
- FactPattern factPattern,
- BaseSingleFieldConstraint con,
+ private String setValueOnConstraint( final RuleModel m,
+ final String operator,
+ final FactPattern factPattern,
+ final BaseSingleFieldConstraint con,
String value ) {
String type = null;
if ( value.startsWith( "\"" ) ) {
type = DataType.TYPE_STRING;
con.setConstraintValueType( SingleFieldConstraint.TYPE_LITERAL );
- con.setValue( value.substring( 1, value.length() - 1 ) );
+ con.setValue( value.substring( 1,
+ value.length() - 1 ) );
} else if ( value.startsWith( "(" ) ) {
if ( operator != null && operator.contains( "in" ) ) {
value = unwrapParenthesis( value );
@@ -3428,8 +3598,12 @@ private String setValueOnConstraint( RuleModel m,
con ) ) {
type = DataType.TYPE_COMPARABLE;
con.setConstraintValueType( SingleFieldConstraint.TYPE_ENUM );
- } else if ( value.indexOf( '.' ) > 0 && boundParams.containsKey( value.substring( 0, value.indexOf( '.' ) ).trim() ) ) {
- con.setExpressionValue( parseExpression( m, null, value, new ExpressionFormLine() ) );
+ } else if ( value.indexOf( '.' ) > 0 && boundParams.containsKey( value.substring( 0,
+ value.indexOf( '.' ) ).trim() ) ) {
+ con.setExpressionValue( parseExpression( m,
+ null,
+ value,
+ new ExpressionFormLine() ) );
con.setConstraintValueType( BaseSingleFieldConstraint.TYPE_EXPR_BUILDER_VALUE );
value = "";
} else {
@@ -3438,10 +3612,12 @@ private String setValueOnConstraint( RuleModel m,
} else {
if ( value.endsWith( "I" ) ) {
type = DataType.TYPE_NUMERIC_BIGINTEGER;
- value = value.substring( 0, value.length() - 1 );
+ value = value.substring( 0,
+ value.length() - 1 );
} else if ( value.endsWith( "B" ) ) {
type = DataType.TYPE_NUMERIC_BIGDECIMAL;
- value = value.substring( 0, value.length() - 1 );
+ value = value.substring( 0,
+ value.length() - 1 );
} else if ( value.endsWith( "f" ) ) {
type = DataType.TYPE_NUMERIC_FLOAT;
} else if ( value.endsWith( "d" ) ) {
@@ -3461,9 +3637,9 @@ private String setValueOnConstraint( RuleModel m,
return type;
}
- private boolean isEnumerationValue( RuleModel ruleModel,
- FactPattern factPattern,
- BaseSingleFieldConstraint con ) {
+ private boolean isEnumerationValue( final RuleModel ruleModel,
+ final FactPattern factPattern,
+ final BaseSingleFieldConstraint con ) {
String factType = null;
String fieldName = null;
if ( con instanceof SingleFieldConstraintEBLeftSide ) {
@@ -3483,7 +3659,8 @@ private boolean isEnumerationValue( RuleModel ruleModel,
return false;
}
- final String fullyQualifiedFactType = getFQFactType( ruleModel, factType );
+ final String fullyQualifiedFactType = getFQFactType( ruleModel,
+ factType );
final String key = fullyQualifiedFactType + "#" + fieldName;
final Map<String, String[]> projectJavaEnumDefinitions = dmo.getProjectJavaEnumDefinitions();
@@ -3496,16 +3673,17 @@ private static class ComplexExpr implements Expr {
private final List<Expr> subExprs = new ArrayList<Expr>();
private final String connector;
- private ComplexExpr( String connector ) {
+ private ComplexExpr( final String connector ) {
this.connector = connector;
}
- public FieldConstraint asFieldConstraint( RuleModel m,
- FactPattern factPattern ) {
+ public FieldConstraint asFieldConstraint( final RuleModel m,
+ final FactPattern factPattern ) {
CompositeFieldConstraint comp = new CompositeFieldConstraint();
comp.setCompositeJunctionType( connector.equals( "&&" ) ? CompositeFieldConstraint.COMPOSITE_TYPE_AND : CompositeFieldConstraint.COMPOSITE_TYPE_OR );
for ( Expr expr : subExprs ) {
- comp.addConstraint( expr.asFieldConstraint( m, factPattern ) );
+ comp.addConstraint( expr.asFieldConstraint( m,
+ factPattern ) );
}
return comp;
}
@@ -3515,12 +3693,12 @@ private static class EvalExpr implements Expr {
private final String expr;
- private EvalExpr( String expr ) {
+ private EvalExpr( final String expr ) {
this.expr = expr;
}
- public FieldConstraint asFieldConstraint( RuleModel m,
- FactPattern factPattern ) {
+ public FieldConstraint asFieldConstraint( final RuleModel m,
+ final FactPattern factPattern ) {
SingleFieldConstraint con = new SingleFieldConstraint();
con.setConstraintValueType( SingleFieldConstraint.TYPE_PREDICATE );
con.setValue( expr );
diff --git a/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelPersistenceHelper.java b/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelPersistenceHelper.java
index 768e3350b2f..53e039cca27 100644
--- a/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelPersistenceHelper.java
+++ b/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/RuleModelPersistenceHelper.java
@@ -25,14 +25,15 @@
class RuleModelPersistenceHelper {
- static String unwrapParenthesis( String s ) {
+ static String unwrapParenthesis( final String s ) {
int start = s.indexOf( '(' );
int end = s.lastIndexOf( ')' );
- return s.substring( start + 1, end ).trim();
+ return s.substring( start + 1,
+ end ).trim();
}
- static String getSimpleFactType( String className,
- PackageDataModelOracle dmo ) {
+ static String getSimpleFactType( final String className,
+ final PackageDataModelOracle dmo ) {
for ( String type : dmo.getProjectModelFields().keySet() ) {
if ( type.equals( className ) ) {
return type.substring( type.lastIndexOf( "." ) + 1 );
@@ -169,10 +170,9 @@ static int inferFieldNature( final String dataType,
return nature;
}
- static ModelField[] findFields(
- PackageDataModelOracle dmo,
- RuleModel m,
- String type ) {
+ static ModelField[] findFields( final RuleModel m,
+ final PackageDataModelOracle dmo,
+ final String type ) {
ModelField[] fields = dmo.getProjectModelFields().get( type );
if ( fields != null ) {
return fields;
@@ -189,9 +189,8 @@ static ModelField[] findFields(
return dmo.getProjectModelFields().get( m.getPackageName() + "." + type );
}
- static ModelField findField(
- ModelField[] typeFields,
- String fieldName ) {
+ static ModelField findField( final ModelField[] typeFields,
+ final String fieldName ) {
if ( typeFields != null && fieldName != null ) {
for ( ModelField typeField : typeFields ) {
if ( typeField.getName().equals( fieldName ) ) {
@@ -202,11 +201,24 @@ static ModelField findField(
return null;
}
- static String inferDataType( ActionFieldList action,
- String field,
- Map<String, String> boundParams,
- PackageDataModelOracle dmo,
- Imports imports ) {
+ static MethodInfo findMethodInfo( final List<MethodInfo> methodInfos,
+ final String fieldName ) {
+ if ( methodInfos != null && fieldName != null ) {
+ for ( MethodInfo methodInfo : methodInfos ) {
+ if ( methodInfo.getName().equals( fieldName ) ) {
+ return methodInfo;
+ }
+ }
+ }
+ return null;
+
+ }
+
+ static String inferDataType( final ActionFieldList action,
+ final String field,
+ final Map<String, String> boundParams,
+ final PackageDataModelOracle dmo,
+ final Imports imports ) {
String factType = null;
if ( action instanceof ActionInsertFact ) {
factType = ( (ActionInsertFact) action ).getFactType();
@@ -250,9 +262,9 @@ static String inferDataType( ActionFieldList action,
return null;
}
- static String inferDataType( String param,
- Map<String, String> boundParams,
- boolean isJavaDialect ) {
+ static String inferDataType( final String param,
+ final Map<String, String> boundParams,
+ final boolean isJavaDialect ) {
if ( param.startsWith( "sdf.parse(\"" ) ) {
return DataType.TYPE_DATE;
} else if ( param.startsWith( "\"" ) ) {
@@ -271,10 +283,10 @@ static String inferDataType( String param,
return DataType.TYPE_NUMERIC;
}
- static String adjustParam( String dataType,
- String param,
- Map<String, String> boundParams,
- boolean isJavaDialect ) {
+ static String adjustParam( final String dataType,
+ final String param,
+ final Map<String, String> boundParams,
+ final boolean isJavaDialect ) {
if ( dataType == DataType.TYPE_DATE ) {
return param.substring( "sdf.parse(\"".length(),
param.length() - 2 );
@@ -302,9 +314,9 @@ static String adjustParam( String dataType,
return param;
}
- static List<MethodInfo> getMethodInfosForType( RuleModel model,
- PackageDataModelOracle dmo,
- String variableType ) {
+ static List<MethodInfo> getMethodInfosForType( final RuleModel model,
+ final PackageDataModelOracle dmo,
+ final String variableType ) {
List<MethodInfo> methods = dmo.getProjectMethodInformation().get( variableType );
if ( methods == null ) {
for ( String imp : model.getImports().getImportStrings() ) {
@@ -318,4 +330,5 @@ static List<MethodInfo> getMethodInfosForType( RuleModel model,
}
return methods;
}
+
}
diff --git a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java
index 7158f86a99f..b109679b122 100644
--- a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java
+++ b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceTest.java
@@ -16,6 +16,7 @@
package org.drools.workbench.models.commons.backend.rule;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
@@ -25,6 +26,7 @@
import org.drools.workbench.models.datamodel.imports.Import;
import org.drools.workbench.models.datamodel.oracle.DataType;
import org.drools.workbench.models.datamodel.oracle.FieldAccessorsAndMutators;
+import org.drools.workbench.models.datamodel.oracle.MethodInfo;
import org.drools.workbench.models.datamodel.oracle.ModelField;
import org.drools.workbench.models.datamodel.oracle.PackageDataModelOracle;
import org.drools.workbench.models.datamodel.rule.ActionCallMethod;
@@ -4408,6 +4410,18 @@ public void testLHSFormula() {
con2.setValue( "0" );
p.addConstraint( con2 );
+ final HashMap<String, List<MethodInfo>> map = new HashMap<String, List<MethodInfo>>();
+ final ArrayList<MethodInfo> methodInfos = new ArrayList<MethodInfo>();
+ methodInfos.add( new MethodInfo( "intValue",
+ Collections.EMPTY_LIST,
+ "int",
+ null,
+ DataType.TYPE_NUMERIC_INTEGER ) );
+ map.put( "Number",
+ methodInfos );
+
+ when( dmo.getProjectMethodInformation() ).thenReturn( map );
+
String expected = "rule \"test\"\n"
+ "dialect \"mvel\"\n"
+ "when\n"
@@ -4416,7 +4430,8 @@ public void testLHSFormula() {
+ "end";
checkMarshallUnmarshall( expected,
- m );
+ m,
+ dmo );
}
@Test
@@ -4445,6 +4460,18 @@ public void testLHSReturnType() {
con2.setValue( "0" );
p.addConstraint( con2 );
+ final HashMap<String, List<MethodInfo>> map = new HashMap<String, List<MethodInfo>>();
+ final ArrayList<MethodInfo> methodInfos = new ArrayList<MethodInfo>();
+ methodInfos.add( new MethodInfo( "intValue",
+ Collections.EMPTY_LIST,
+ "int",
+ null,
+ DataType.TYPE_NUMERIC_INTEGER ) );
+ map.put( "Number",
+ methodInfos );
+
+ when( dmo.getProjectMethodInformation() ).thenReturn( map );
+
String expected = "rule \"test\"\n"
+ "dialect \"mvel\"\n"
+ "when\n"
@@ -4453,7 +4480,8 @@ public void testLHSReturnType() {
+ "end";
checkMarshallUnmarshall( expected,
- m );
+ m,
+ dmo );
}
@Test
diff --git a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java
index 1bbdce1bc31..d0bafc94790 100644
--- a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java
+++ b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java
@@ -1751,8 +1751,8 @@ public void testNestedFieldConstraints() {
sfp0.getConstraintValueType() );
assertNull( sfp0.getParent() );
- assertTrue( fp.getConstraint( 1 ) instanceof SingleFieldConstraint );
- SingleFieldConstraint sfp1 = (SingleFieldConstraint) fp.getConstraint( 1 );
+ assertTrue( fp.getConstraint( 1 ) instanceof SingleFieldConstraintEBLeftSide );
+ SingleFieldConstraintEBLeftSide sfp1 = (SingleFieldConstraintEBLeftSide) fp.getConstraint( 1 );
assertEquals( "ParentType",
sfp1.getFactType() );
assertEquals( "parentChildField",
@@ -1764,11 +1764,10 @@ public void testNestedFieldConstraints() {
assertNull( sfp1.getValue() );
assertEquals( BaseSingleFieldConstraint.TYPE_UNDEFINED,
sfp1.getConstraintValueType() );
- assertSame( sfp0,
- sfp1.getParent() );
+ assertNull( sfp1.getParent() );
- assertTrue( fp.getConstraint( 2 ) instanceof SingleFieldConstraint );
- SingleFieldConstraint sfp2 = (SingleFieldConstraint) fp.getConstraint( 2 );
+ assertTrue( fp.getConstraint( 2 ) instanceof SingleFieldConstraintEBLeftSide );
+ SingleFieldConstraintEBLeftSide sfp2 = (SingleFieldConstraintEBLeftSide) fp.getConstraint( 2 );
assertEquals( "childField",
sfp2.getFieldName() );
assertEquals( "java.lang.String",
@@ -1779,8 +1778,7 @@ public void testNestedFieldConstraints() {
sfp2.getValue() );
assertEquals( BaseSingleFieldConstraint.TYPE_LITERAL,
sfp2.getConstraintValueType() );
- assertSame( sfp1,
- sfp2.getParent() );
+ assertNull( sfp2.getParent() );
}
@Test
@@ -4980,6 +4978,208 @@ public void testMethodCallWithTwoParametersIntegerAndString() throws Exception {
actionCallMethod.getFieldValue( 1 ).getType() );
}
+ @Test
+ public void testLHSNumberExpressionWithoutThisPrefix() throws Exception {
+ String drl = "package org.mortgages;\n" +
+ "import java.lang.Number\n" +
+ "rule \"test\"\n" +
+ " dialect \"mvel\"\n" +
+ " when\n" +
+ " Number( intValue() > 5 )\n" +
+ " then\n" +
+ "end";
+
+ final HashMap<String, List<MethodInfo>> map = new HashMap<String, List<MethodInfo>>();
+ final ArrayList<MethodInfo> methodInfos = new ArrayList<MethodInfo>();
+ methodInfos.add( new MethodInfo( "intValue",
+ Collections.EMPTY_LIST,
+ "int",
+ null,
+ DataType.TYPE_NUMERIC_INTEGER ) );
+ map.put( "java.lang.Number",
+ methodInfos );
+
+ when( dmo.getProjectMethodInformation() ).thenReturn( map );
+
+ final RuleModel m = RuleModelDRLPersistenceImpl.getInstance().unmarshal( drl,
+ new ArrayList<String>(),
+ dmo );
+
+ assertNotNull( m );
+
+ assertEquals( 1,
+ m.lhs.length );
+
+ assertTrue( m.lhs[ 0 ] instanceof FactPattern );
+ final FactPattern fp = (FactPattern) m.lhs[ 0 ];
+ assertEquals( "Number",
+ fp.getFactType() );
+
+ assertEquals( 1,
+ fp.getNumberOfConstraints() );
+ assertTrue( fp.getConstraint( 0 ) instanceof SingleFieldConstraintEBLeftSide );
+ final SingleFieldConstraintEBLeftSide exp = (SingleFieldConstraintEBLeftSide) fp.getConstraint( 0 );
+ assertEquals( "int",
+ exp.getFieldType() );
+ assertEquals( ">",
+ exp.getOperator() );
+ assertEquals( "5",
+ exp.getValue() );
+
+ assertEquals( 2,
+ exp.getExpressionLeftSide().getParts().size() );
+ assertTrue( exp.getExpressionLeftSide().getParts().get( 0 ) instanceof ExpressionUnboundFact );
+ final ExpressionUnboundFact expPart0 = (ExpressionUnboundFact) exp.getExpressionLeftSide().getParts().get( 0 );
+ assertEquals( "Number",
+ expPart0.getFact().getFactType() );
+
+ assertTrue( exp.getExpressionLeftSide().getParts().get( 1 ) instanceof ExpressionMethod );
+ final ExpressionMethod expPart1 = (ExpressionMethod) exp.getExpressionLeftSide().getParts().get( 1 );
+ assertEquals( "intValue",
+ expPart1.getName() );
+ }
+
+ @Test
+ public void testLHSNumberExpressionWithThisPrefix() throws Exception {
+ String drl = "package org.mortgages;\n" +
+ "import java.lang.Number\n" +
+ "rule \"test\"\n" +
+ " dialect \"mvel\"\n" +
+ " when\n" +
+ " Number( this.intValue() > 5 )\n" +
+ " then\n" +
+ "end";
+
+ final HashMap<String, List<MethodInfo>> map = new HashMap<String, List<MethodInfo>>();
+ final ArrayList<MethodInfo> methodInfos = new ArrayList<MethodInfo>();
+ methodInfos.add( new MethodInfo( "intValue",
+ Collections.EMPTY_LIST,
+ "int",
+ null,
+ DataType.TYPE_NUMERIC_INTEGER ) );
+ map.put( "java.lang.Number",
+ methodInfos );
+
+ when( dmo.getProjectMethodInformation() ).thenReturn( map );
+
+ final RuleModel m = RuleModelDRLPersistenceImpl.getInstance().unmarshal( drl,
+ new ArrayList<String>(),
+ dmo );
+
+ assertNotNull( m );
+
+ assertEquals( 1,
+ m.lhs.length );
+
+ assertTrue( m.lhs[ 0 ] instanceof FactPattern );
+ final FactPattern fp = (FactPattern) m.lhs[ 0 ];
+ assertEquals( "Number",
+ fp.getFactType() );
+
+ assertEquals( 1,
+ fp.getNumberOfConstraints() );
+ assertTrue( fp.getConstraint( 0 ) instanceof SingleFieldConstraintEBLeftSide );
+ final SingleFieldConstraintEBLeftSide exp = (SingleFieldConstraintEBLeftSide) fp.getConstraint( 0 );
+ assertEquals( "int",
+ exp.getFieldType() );
+ assertEquals( ">",
+ exp.getOperator() );
+ assertEquals( "5",
+ exp.getValue() );
+
+ assertEquals( 3,
+ exp.getExpressionLeftSide().getParts().size() );
+ assertTrue( exp.getExpressionLeftSide().getParts().get( 0 ) instanceof ExpressionUnboundFact );
+ final ExpressionUnboundFact expPart0 = (ExpressionUnboundFact) exp.getExpressionLeftSide().getParts().get( 0 );
+ assertEquals( "Number",
+ expPart0.getFact().getFactType() );
+
+ assertTrue( exp.getExpressionLeftSide().getParts().get( 1 ) instanceof ExpressionField );
+ final ExpressionField expPart1 = (ExpressionField) exp.getExpressionLeftSide().getParts().get( 1 );
+ assertEquals( "this",
+ expPart1.getName() );
+
+ assertTrue( exp.getExpressionLeftSide().getParts().get( 2 ) instanceof ExpressionMethod );
+ final ExpressionMethod expPart2 = (ExpressionMethod) exp.getExpressionLeftSide().getParts().get( 2 );
+ assertEquals( "intValue",
+ expPart2.getName() );
+ }
+
+ @Test
+ public void testLHSNestedMethodCalls() throws Exception {
+ String drl = "package org.mortgages;\n" +
+ "rule \"test\"\n" +
+ " dialect \"mvel\"\n" +
+ " when\n" +
+ " Parent( methodToGetChild1().methodToGetChild2().field1 > 5 )\n" +
+ " then\n" +
+ "end";
+
+ addMethodInformation( "Parent",
+ "methodToGetChild1",
+ Collections.EMPTY_LIST,
+ "Child1",
+ null,
+ "Child1" );
+ addMethodInformation( "Child1",
+ "methodToGetChild2",
+ Collections.EMPTY_LIST,
+ "Child2",
+ null,
+ "Child2" );
+ addModelField( "Child2",
+ "field1",
+ "int",
+ DataType.TYPE_NUMERIC_INTEGER );
+
+ final RuleModel m = RuleModelDRLPersistenceImpl.getInstance().unmarshal( drl,
+ new ArrayList<String>(),
+ dmo );
+
+ assertNotNull( m );
+
+ assertEquals( 1,
+ m.lhs.length );
+
+ assertTrue( m.lhs[ 0 ] instanceof FactPattern );
+ final FactPattern fp = (FactPattern) m.lhs[ 0 ];
+ assertEquals( "Parent",
+ fp.getFactType() );
+
+ assertEquals( 1,
+ fp.getNumberOfConstraints() );
+ assertTrue( fp.getConstraint( 0 ) instanceof SingleFieldConstraintEBLeftSide );
+ final SingleFieldConstraintEBLeftSide exp = (SingleFieldConstraintEBLeftSide) fp.getConstraint( 0 );
+ assertEquals( "int",
+ exp.getFieldType() );
+ assertEquals( ">",
+ exp.getOperator() );
+ assertEquals( "5",
+ exp.getValue() );
+
+ assertEquals( 4,
+ exp.getExpressionLeftSide().getParts().size() );
+ assertTrue( exp.getExpressionLeftSide().getParts().get( 0 ) instanceof ExpressionUnboundFact );
+ final ExpressionUnboundFact expPart0 = (ExpressionUnboundFact) exp.getExpressionLeftSide().getParts().get( 0 );
+ assertEquals( "Parent",
+ expPart0.getFact().getFactType() );
+
+ assertTrue( exp.getExpressionLeftSide().getParts().get( 1 ) instanceof ExpressionMethod );
+ final ExpressionMethod expPart1 = (ExpressionMethod) exp.getExpressionLeftSide().getParts().get( 1 );
+ assertEquals( "methodToGetChild1",
+ expPart1.getName() );
+
+ assertTrue( exp.getExpressionLeftSide().getParts().get( 2 ) instanceof ExpressionMethod );
+ final ExpressionMethod expPart2 = (ExpressionMethod) exp.getExpressionLeftSide().getParts().get( 2 );
+ assertEquals( "methodToGetChild2",
+ expPart2.getName() );
+
+ assertTrue( exp.getExpressionLeftSide().getParts().get( 3 ) instanceof ExpressionField );
+ final ExpressionField expPart3 = (ExpressionField) exp.getExpressionLeftSide().getParts().get( 3 );
+ assertEquals( "field1",
+ expPart3.getName() );
+ }
+
private void assertEqualsIgnoreWhitespace( final String expected,
final String actual ) {
final String cleanExpected = expected.replaceAll( "\\s+",
|
c28831a0bdaaa573bfd6c4e837183eb5197876fb
|
hadoop
|
YARN-280. RM does not reject app submission with- invalid tokens (Daryn Sharp via tgraves)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1425085 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 6e2cca1006e7a..683008d0fec85 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -224,6 +224,9 @@ Release 0.23.6 - UNRELEASED
YARN-266. RM and JHS Web UIs are blank because AppsBlock is not escaping
string properly (Ravi Prakash via jlowe)
+ YARN-280. RM does not reject app submission with invalid tokens
+ (Daryn Sharp via tgraves)
+
Release 0.23.5 - UNRELEASED
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/DelegationTokenRenewer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/DelegationTokenRenewer.java
index e5abbb7ede9ec..9232190ba3bec 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/DelegationTokenRenewer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/security/DelegationTokenRenewer.java
@@ -276,21 +276,26 @@ public synchronized void addApplication(
Collection <Token<?>> tokens = ts.getAllTokens();
long now = System.currentTimeMillis();
+ // find tokens for renewal, but don't add timers until we know
+ // all renewable tokens are valid
+ Set<DelegationTokenToRenew> dtrs = new HashSet<DelegationTokenToRenew>();
for(Token<?> token : tokens) {
// first renew happens immediately
if (token.isManaged()) {
DelegationTokenToRenew dtr =
new DelegationTokenToRenew(applicationId, token, getConfig(), now,
shouldCancelAtEnd);
-
- addTokenToList(dtr);
-
- setTimerForTokenRenewal(dtr, true);
- if (LOG.isDebugEnabled()) {
- LOG.debug("Registering token for renewal for:" +
- " service = " + token.getService() +
- " for appId = " + applicationId);
- }
+ renewToken(dtr);
+ dtrs.add(dtr);
+ }
+ }
+ for (DelegationTokenToRenew dtr : dtrs) {
+ addTokenToList(dtr);
+ setTimerForTokenRenewal(dtr);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Registering token for renewal for:" +
+ " service = " + dtr.token.getService() +
+ " for appId = " + applicationId);
}
}
}
@@ -315,22 +320,13 @@ public synchronized void run() {
Token<?> token = dttr.token;
try {
- // need to use doAs so that http can find the kerberos tgt
- dttr.expirationDate = UserGroupInformation.getLoginUser()
- .doAs(new PrivilegedExceptionAction<Long>(){
-
- @Override
- public Long run() throws Exception {
- return dttr.token.renew(dttr.conf);
- }
- });
-
+ renewToken(dttr);
if (LOG.isDebugEnabled()) {
LOG.debug("Renewing delegation-token for:" + token.getService() +
"; new expiration;" + dttr.expirationDate);
}
- setTimerForTokenRenewal(dttr, false);// set the next one
+ setTimerForTokenRenewal(dttr);// set the next one
} catch (Exception e) {
LOG.error("Exception renewing token" + token + ". Not rescheduled", e);
removeFailedDelegationToken(dttr);
@@ -347,19 +343,12 @@ public synchronized boolean cancel() {
/**
* set task to renew the token
*/
- private
- void setTimerForTokenRenewal(DelegationTokenToRenew token,
- boolean firstTime) throws IOException {
+ private void setTimerForTokenRenewal(DelegationTokenToRenew token)
+ throws IOException {
// calculate timer time
- long now = System.currentTimeMillis();
- long renewIn;
- if(firstTime) {
- renewIn = now;
- } else {
- long expiresIn = (token.expirationDate - now);
- renewIn = now + expiresIn - expiresIn/10; // little bit before the expiration
- }
+ long expiresIn = token.expirationDate - System.currentTimeMillis();
+ long renewIn = token.expirationDate - expiresIn/10; // little bit before the expiration
// need to create new task every time
TimerTask tTask = new RenewalTimerTask(token);
@@ -368,6 +357,24 @@ void setTimerForTokenRenewal(DelegationTokenToRenew token,
renewalTimer.schedule(token.timerTask, new Date(renewIn));
}
+ // renew a token
+ private void renewToken(final DelegationTokenToRenew dttr)
+ throws IOException {
+ // need to use doAs so that http can find the kerberos tgt
+ // NOTE: token renewers should be responsible for the correct UGI!
+ try {
+ dttr.expirationDate = UserGroupInformation.getLoginUser().doAs(
+ new PrivilegedExceptionAction<Long>(){
+ @Override
+ public Long run() throws Exception {
+ return dttr.token.renew(dttr.conf);
+ }
+ });
+ } catch (InterruptedException e) {
+ throw new IOException(e);
+ }
+ }
+
// cancel a token
private void cancelToken(DelegationTokenToRenew t) {
if(t.shouldCancelAtEnd) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestDelegationTokenRenewer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestDelegationTokenRenewer.java
index 1c3614e46df37..ad127a9264d9d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestDelegationTokenRenewer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestDelegationTokenRenewer.java
@@ -357,6 +357,27 @@ public void testDTRenewal () throws Exception {
}
}
+ @Test
+ public void testInvalidDTWithAddApplication() throws Exception {
+ MyFS dfs = (MyFS)FileSystem.get(conf);
+ LOG.info("dfs="+(Object)dfs.hashCode() + ";conf="+conf.hashCode());
+
+ MyToken token = dfs.getDelegationToken(new Text("user1"));
+ token.cancelToken();
+
+ Credentials ts = new Credentials();
+ ts.addToken(token.getKind(), token);
+
+ // register the tokens for renewal
+ ApplicationId appId = BuilderUtils.newApplicationId(0, 0);
+ try {
+ delegationTokenRenewer.addApplication(appId, ts, true);
+ fail("App submission with a cancelled token should have failed");
+ } catch (InvalidToken e) {
+ // expected
+ }
+ }
+
/**
* Basic idea of the test:
* 1. register a token for 2 seconds with no cancel at the end
|
db02e53849878e327f9aa67a9cbe6e919e2b0022
|
intellij-community
|
make find toolwindow work in dumb mode and- correctly navigate--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/UsageView/src/com/intellij/usages/UsageInfo2UsageAdapter.java b/UsageView/src/com/intellij/usages/UsageInfo2UsageAdapter.java
index 90cc9d7917901..77591d3a6c2fc 100644
--- a/UsageView/src/com/intellij/usages/UsageInfo2UsageAdapter.java
+++ b/UsageView/src/com/intellij/usages/UsageInfo2UsageAdapter.java
@@ -268,7 +268,7 @@ public VirtualFile getFile() {
return getElement().getContainingFile().getVirtualFile();
}
else {
- return myUsageInfo.getFile().getVirtualFile();
+ return myUsageInfo.getVirtualFile();
}
}
diff --git a/UsageView/src/com/intellij/usages/impl/UsageViewImpl.java b/UsageView/src/com/intellij/usages/impl/UsageViewImpl.java
index c46e65e5b21a9..a10d988422672 100644
--- a/UsageView/src/com/intellij/usages/impl/UsageViewImpl.java
+++ b/UsageView/src/com/intellij/usages/impl/UsageViewImpl.java
@@ -1051,7 +1051,6 @@ private MyPanel(JTree tree) {
mySupport = new OccurenceNavigatorSupport(tree) {
protected Navigatable createDescriptorForNode(DefaultMutableTreeNode node) {
if (node.getChildCount() > 0) return null;
- if (node instanceof Node && !((Node)node).isValid()) return null;
return getNavigatableForNode(node);
}
diff --git a/lang-api/src/com/intellij/usageView/UsageInfo.java b/lang-api/src/com/intellij/usageView/UsageInfo.java
index f3d5109c828ea..522fca4a408ad 100644
--- a/lang-api/src/com/intellij/usageView/UsageInfo.java
+++ b/lang-api/src/com/intellij/usageView/UsageInfo.java
@@ -30,6 +30,7 @@ public class UsageInfo {
public static final UsageInfo[] EMPTY_ARRAY = new UsageInfo[0];
private static final Logger LOG = Logger.getInstance("#com.intellij.usageView.UsageInfo");
private final SmartPsiElementPointer mySmartPointer;
+ private final VirtualFile myVirtualFile;
public final int startOffset; // in navigation element
public final int endOffset; // in navigation element
@@ -39,6 +40,7 @@ public UsageInfo(@NotNull PsiElement element, int startOffset, int endOffset, bo
LOG.assertTrue(element.isValid());
LOG.assertTrue(element == element.getNavigationElement());
mySmartPointer = SmartPointerManager.getInstance(element.getProject()).createSmartPsiElementPointer(element);
+ myVirtualFile = element.getContainingFile().getVirtualFile();
this.startOffset = startOffset;
this.endOffset = endOffset;
this.isNonCodeUsage = isNonCodeUsage;
@@ -48,6 +50,7 @@ public UsageInfo(@NotNull PsiElement element, boolean isNonCodeUsage) {
LOG.assertTrue(element.isValid());
element = element.getNavigationElement();
mySmartPointer = SmartPointerManager.getInstance(element.getProject()).createSmartPsiElementPointer(element);
+ myVirtualFile = element.getContainingFile().getVirtualFile();
TextRange range = element.getTextRange();
if (range == null) {
@@ -141,4 +144,8 @@ public int hashCode() {
public PsiFile getFile() {
return mySmartPointer.getContainingFile();
}
+
+ public VirtualFile getVirtualFile() {
+ return myVirtualFile;
+ }
}
diff --git a/lang-impl/src/com/intellij/usageView/impl/UsageViewManagerImpl.java b/lang-impl/src/com/intellij/usageView/impl/UsageViewManagerImpl.java
index 6c9817474269f..8f5cd1710e462 100644
--- a/lang-impl/src/com/intellij/usageView/impl/UsageViewManagerImpl.java
+++ b/lang-impl/src/com/intellij/usageView/impl/UsageViewManagerImpl.java
@@ -25,7 +25,7 @@ public class UsageViewManagerImpl extends UsageViewManager {
public UsageViewManagerImpl(final Project project, final ToolWindowManager toolWindowManager) {
myToolWindowManager = toolWindowManager;
- ToolWindow toolWindow = myToolWindowManager.registerToolWindow(ToolWindowId.FIND, true, ToolWindowAnchor.BOTTOM, project);
+ ToolWindow toolWindow = myToolWindowManager.registerToolWindow(ToolWindowId.FIND, true, ToolWindowAnchor.BOTTOM, project, true);
toolWindow.setToHideOnEmptyContent(true);
toolWindow.setIcon(IconLoader.getIcon("/general/toolWindowFind.png"));
myFindContentManager = toolWindow.getContentManager();
|
a7a67f1a1adc33e6f629bd1aa9aaa92170eda88e
|
intellij-community
|
ChangeUtil-encode/decodeInformation for groovy--
|
a
|
https://github.com/JetBrains/intellij-community
|
diff --git a/plugins/groovy/src/org/jetbrains/plugins/groovy/GroovyLoader.java b/plugins/groovy/src/org/jetbrains/plugins/groovy/GroovyLoader.java
index 55e049e160fdd..b31bd64c0657e 100644
--- a/plugins/groovy/src/org/jetbrains/plugins/groovy/GroovyLoader.java
+++ b/plugins/groovy/src/org/jetbrains/plugins/groovy/GroovyLoader.java
@@ -27,6 +27,8 @@
import com.intellij.openapi.project.ProjectManagerAdapter;
import static com.intellij.patterns.PlatformPatterns.psiElement;
import com.intellij.psi.PsiElement;
+import com.intellij.psi.impl.source.tree.ChangeUtil;
+import org.jetbrains.plugins.groovy.lang.GroovyChangeUtilSupport;
import com.intellij.refactoring.rename.RenameInputValidator;
import com.intellij.refactoring.rename.RenameInputValidatorRegistry;
import com.intellij.util.Function;
@@ -53,6 +55,8 @@ public class GroovyLoader implements ApplicationComponent {
public void initComponent() {
GroovyEditorActionsManager.registerGroovyEditorActions();
+ ChangeUtil.registerCopyHandler(new GroovyChangeUtilSupport());
+
//Register Keyword completion
setupCompletion();
diff --git a/plugins/groovy/src/org/jetbrains/plugins/groovy/lang/GroovyChangeUtilSupport.java b/plugins/groovy/src/org/jetbrains/plugins/groovy/lang/GroovyChangeUtilSupport.java
new file mode 100644
index 0000000000000..29cf02a2831c5
--- /dev/null
+++ b/plugins/groovy/src/org/jetbrains/plugins/groovy/lang/GroovyChangeUtilSupport.java
@@ -0,0 +1,76 @@
+/*
+ * @author max
+ */
+package org.jetbrains.plugins.groovy.lang;
+
+import com.intellij.lang.ASTNode;
+import com.intellij.openapi.util.Key;
+import com.intellij.psi.*;
+import com.intellij.psi.impl.source.SourceTreeToPsiMap;
+import com.intellij.psi.impl.source.tree.CompositeElement;
+import com.intellij.psi.impl.source.tree.TreeCopyHandler;
+import com.intellij.psi.impl.source.tree.TreeElement;
+import com.intellij.util.IncorrectOperationException;
+import org.jetbrains.plugins.groovy.lang.parser.GroovyElementTypes;
+import org.jetbrains.plugins.groovy.lang.parser.GroovyReferenceAdjuster;
+import org.jetbrains.plugins.groovy.lang.psi.GrReferenceElement;
+import org.jetbrains.plugins.groovy.lang.psi.api.GroovyResolveResult;
+import org.jetbrains.plugins.groovy.lang.psi.api.toplevel.imports.GrImportStatement;
+
+import java.util.Map;
+
+/**
+ * @author peter
+ */
+public class GroovyChangeUtilSupport implements TreeCopyHandler {
+
+ public TreeElement decodeInformation(TreeElement element, final Map<Object, Object> decodingState) {
+ if (element instanceof CompositeElement) {
+ if (element.getElementType() == GroovyElementTypes.REFERENCE_ELEMENT ||
+ element.getElementType() == GroovyElementTypes.REFERENCE_EXPRESSION) {
+ GrReferenceElement ref = (GrReferenceElement)SourceTreeToPsiMap.treeElementToPsi(element);
+ final PsiMember refMember = element.getCopyableUserData(REFERENCED_MEMBER_KEY);
+ if (refMember != null) {
+ element.putCopyableUserData(REFERENCED_MEMBER_KEY, null);
+ PsiElement refElement1 = ref.resolve();
+ if (!refMember.getManager().areElementsEquivalent(refMember, refElement1)) {
+ try {
+ if (!(refMember instanceof PsiClass) || ref.getQualifier() == null) {
+ // can restore only if short (otherwise qualifier should be already restored)
+ ref = (GrReferenceElement)ref.bindToElement(refMember);
+ }
+ }
+ catch (IncorrectOperationException ignored) {
+ }
+ return (TreeElement)SourceTreeToPsiMap.psiElementToTree(ref);
+ } else {
+ // shorten references to the same package and to inner classes that can be accessed by short name
+ GroovyReferenceAdjuster.INSTANCE.process(element, false, false);
+ }
+ }
+ return element;
+ }
+ }
+ return null;
+ }
+
+ public void encodeInformation(final TreeElement element, final ASTNode original, final Map<Object, Object> encodingState) {
+ if (original instanceof CompositeElement) {
+ if (original.getElementType() == GroovyElementTypes.REFERENCE_ELEMENT || original.getElementType() == GroovyElementTypes.REFERENCE_EXPRESSION) {
+ final GroovyResolveResult result = ((GrReferenceElement)original.getPsi()).advancedResolve();
+ if (result != null) {
+ final PsiElement target = result.getElement();
+
+ if (target instanceof PsiClass ||
+ (target instanceof PsiMethod || target instanceof PsiField) &&
+ ((PsiMember) target).hasModifierProperty(PsiModifier.STATIC) &&
+ result.getCurrentFileResolveContext() instanceof GrImportStatement) {
+ element.putCopyableUserData(REFERENCED_MEMBER_KEY, (PsiMember) target);
+ }
+ }
+ }
+ }
+ }
+
+ private static final Key<PsiMember> REFERENCED_MEMBER_KEY = Key.create("REFERENCED_MEMBER_KEY");
+}
\ No newline at end of file
diff --git a/plugins/groovy/src/org/jetbrains/plugins/groovy/lang/psi/util/PsiUtil.java b/plugins/groovy/src/org/jetbrains/plugins/groovy/lang/psi/util/PsiUtil.java
index 32fd42f5bfdf7..12cea7247ff66 100644
--- a/plugins/groovy/src/org/jetbrains/plugins/groovy/lang/psi/util/PsiUtil.java
+++ b/plugins/groovy/src/org/jetbrains/plugins/groovy/lang/psi/util/PsiUtil.java
@@ -405,9 +405,10 @@ private static void doShorten(PsiElement element) {
}
public static void shortenReference(GrCodeReferenceElement ref) {
- if (ref.getQualifier() != null && mayShorten(ref)) {
+ if (ref.getQualifier() != null && (PsiTreeUtil.getParentOfType(ref, GrDocMemberReference.class) != null ||
+ PsiTreeUtil.getParentOfType(ref, GrDocComment.class) == null)) {
final PsiElement resolved = ref.resolve();
- if (resolved instanceof PsiClass) {
+ if (resolved instanceof PsiClass && mayShorten(ref)) {
ref.setQualifier(null);
try {
ref.bindToElement(resolved);
@@ -419,9 +420,21 @@ public static void shortenReference(GrCodeReferenceElement ref) {
}
}
- private static boolean mayShorten(GrCodeReferenceElement ref) {
- if (PsiTreeUtil.getParentOfType(ref, GrDocMemberReference.class) != null) return true;
- return PsiTreeUtil.getParentOfType(ref, GrDocComment.class) == null;
+ private static boolean mayShorten(@NotNull GrCodeReferenceElement ref) {
+ GrCodeReferenceElement cur = (GrCodeReferenceElement)ref.copy();
+ while (true) {
+ final GrCodeReferenceElement qualifier = cur.getQualifier();
+ if (qualifier == null) {
+ return true;
+ }
+ if (!(qualifier.resolve() instanceof PsiClass)) {
+ final PsiClass correctResolved = (PsiClass)cur.resolve();
+ cur.setQualifier(null);
+ final PsiClass rawResolved = (PsiClass)cur.resolve();
+ return rawResolved == null || cur.getManager().areElementsEquivalent(correctResolved, rawResolved);
+ }
+ cur = qualifier;
+ }
}
@Nullable
diff --git a/plugins/groovy/src/org/jetbrains/plugins/groovy/refactoring/GroovyChangeContextUtil.java b/plugins/groovy/src/org/jetbrains/plugins/groovy/refactoring/GroovyChangeContextUtil.java
index a0a630d449cd9..c194f9225fbfc 100644
--- a/plugins/groovy/src/org/jetbrains/plugins/groovy/refactoring/GroovyChangeContextUtil.java
+++ b/plugins/groovy/src/org/jetbrains/plugins/groovy/refactoring/GroovyChangeContextUtil.java
@@ -23,8 +23,6 @@
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrThisReferenceExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrAccessorMethod;
-import org.jetbrains.plugins.groovy.lang.psi.api.types.GrClassTypeElement;
-import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElementFactory;
/**
* @author Maxim.Medvedev
@@ -91,6 +89,7 @@ public static void decodeContextInfo(PsiElement element, PsiClass thisClass, GrE
return;
}
}
+ /*
else if (element instanceof GrReferenceExpression) {
final GroovyPsiElementFactory factory = GroovyPsiElementFactory.getInstance(element.getProject());
final GrReferenceExpression refExpr = (GrReferenceExpression)element;
@@ -119,8 +118,9 @@ else if (thisAccessExpr instanceof GrReferenceExpression) {
}
}
}
+ */
- PsiClass refClass = element.getCopyableUserData(REF_TO_CLASS);
+ /*PsiClass refClass = element.getCopyableUserData(REF_TO_CLASS);
element.putCopyableUserData(REF_TO_CLASS, null);
if (refClass != null && refClass.isValid()) {
@@ -128,7 +128,7 @@ else if (thisAccessExpr instanceof GrReferenceExpression) {
if (ref != null) {
ref.bindToElement(refClass);
}
- }
+ }*/
}
}
diff --git a/plugins/groovy/src/org/jetbrains/plugins/groovy/refactoring/move/MoveGroovyClassHandler.java b/plugins/groovy/src/org/jetbrains/plugins/groovy/refactoring/move/MoveGroovyClassHandler.java
index c27e3f2c04728..955dca3e01076 100644
--- a/plugins/groovy/src/org/jetbrains/plugins/groovy/refactoring/move/MoveGroovyClassHandler.java
+++ b/plugins/groovy/src/org/jetbrains/plugins/groovy/refactoring/move/MoveGroovyClassHandler.java
@@ -17,9 +17,11 @@
package org.jetbrains.plugins.groovy.refactoring.move;
import com.intellij.psi.*;
-import com.intellij.psi.impl.source.tree.TreeElement;
import com.intellij.psi.impl.source.tree.Factory;
+import com.intellij.psi.impl.source.tree.TreeElement;
import com.intellij.psi.javadoc.PsiDocComment;
+import com.intellij.psi.search.LocalSearchScope;
+import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.refactoring.MoveDestination;
import com.intellij.refactoring.move.moveClassesOrPackages.MoveClassHandler;
import com.intellij.util.IncorrectOperationException;
@@ -27,11 +29,9 @@
import org.jetbrains.plugins.groovy.GroovyFileType;
import org.jetbrains.plugins.groovy.actions.GroovyTemplatesFactory;
import org.jetbrains.plugins.groovy.actions.NewGroovyActionBase;
-import org.jetbrains.plugins.groovy.lang.psi.GroovyFile;
-import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElement;
-import org.jetbrains.plugins.groovy.lang.psi.GroovyRecursiveElementVisitor;
-import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression;
import org.jetbrains.plugins.groovy.lang.lexer.GroovyTokenTypes;
+import org.jetbrains.plugins.groovy.lang.psi.GroovyFile;
+import org.jetbrains.plugins.groovy.lang.psi.api.types.GrCodeReferenceElement;
import org.jetbrains.plugins.groovy.refactoring.GroovyChangeContextUtil;
/**
@@ -87,42 +87,24 @@ else if (!newDirectory.equals(file.getContainingDirectory()) && newDirectory.fin
}
private static void correctOldClassReferences(final PsiClass newClass, final PsiClass oldClass) {
- ((GroovyPsiElement)newClass).accept(new GroovyRecursiveElementVisitor() {
- @Override
- public void visitReferenceExpression(GrReferenceExpression reference) {
- if (reference.isReferenceTo(oldClass)) {
- try {
- reference.bindToElement(newClass);
- }
- catch (IncorrectOperationException e) {
- //here is an exception
- }
- }
- super.visitReferenceExpression(reference);
- }
- });
+ for (PsiReference reference : ReferencesSearch.search(oldClass, new LocalSearchScope(newClass)).findAll()) {
+ reference.bindToElement(newClass);
+ }
}
private static void correctSelfReferences(final PsiClass aClass, final PsiPackage newContainingPackage) {
final PsiPackage aPackage = JavaDirectoryService.getInstance().getPackage(aClass.getContainingFile().getContainingDirectory());
- if (aPackage != null) {
- ((GroovyPsiElement)aClass).accept(new GroovyRecursiveElementVisitor() {
- @Override
- public void visitReferenceExpression(GrReferenceExpression reference) {
- if (reference.isQualified() && reference.isReferenceTo(aClass)) {
- final PsiElement qualifier = reference.getQualifier();
- if (qualifier instanceof PsiJavaCodeReferenceElement && ((PsiJavaCodeReferenceElement)qualifier).isReferenceTo(aPackage)) {
- try {
- ((PsiJavaCodeReferenceElement)qualifier).bindToElement(newContainingPackage);
- }
- catch (IncorrectOperationException e) {
- //here is an exception
- }
- }
- }
- super.visitReferenceExpression(reference);
+ if (aPackage == null) {
+ return;
+ }
+
+ for (PsiReference reference : ReferencesSearch.search(aClass, new LocalSearchScope(aClass)).findAll()) {
+ if (reference instanceof GrCodeReferenceElement) {
+ final GrCodeReferenceElement qualifier = ((GrCodeReferenceElement)reference).getQualifier();
+ if (qualifier != null) {
+ qualifier.bindToElement(newContainingPackage);
}
- });
+ }
}
}
diff --git a/plugins/groovy/testdata/refactoring/move/moveClass/ideadev27996/after/pack2/X.groovy b/plugins/groovy/testdata/refactoring/move/moveClass/ideadev27996/after/pack2/X.groovy
index 7fff3ec5d135c..2aaf40c5b6eca 100644
--- a/plugins/groovy/testdata/refactoring/move/moveClass/ideadev27996/after/pack2/X.groovy
+++ b/plugins/groovy/testdata/refactoring/move/moveClass/ideadev27996/after/pack2/X.groovy
@@ -1,4 +1,5 @@
package pack2
+
public class X {
public void foo(X x) {}
}
\ No newline at end of file
diff --git a/plugins/groovy/testdata/refactoring/move/moveClass/moveMultiple1/after/pack2/Class1.groovy b/plugins/groovy/testdata/refactoring/move/moveClass/moveMultiple1/after/pack2/Class1.groovy
index 54e4d0eddd4aa..02b02d6239efe 100644
--- a/plugins/groovy/testdata/refactoring/move/moveClass/moveMultiple1/after/pack2/Class1.groovy
+++ b/plugins/groovy/testdata/refactoring/move/moveClass/moveMultiple1/after/pack2/Class1.groovy
@@ -1,4 +1,4 @@
-package pack2
+package pack2;
public class Class1 {
Class2 a;
|
db217e6a153928cfab79b030ceb351b6fb543642
|
hbase
|
HBASE-920 Make region balancing sloppier--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@704781 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 29bb96bfea57..0afc8c42388e 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -40,6 +40,7 @@ Release 0.19.0 - Unreleased
HBASE-908 Add approximate counting to CountingBloomFilter
(Andrzej Bialecki via Stack)
HBASE-576 Investigate IPC performance
+ HBASE-920 Make region balancing sloppier
NEW FEATURES
HBASE-875 Use MurmurHash instead of JenkinsHash [in bloomfilters]
diff --git a/conf/hbase-default.xml b/conf/hbase-default.xml
index ae3855169c18..62c188438084 100644
--- a/conf/hbase-default.xml
+++ b/conf/hbase-default.xml
@@ -22,14 +22,6 @@
*/
-->
<configuration>
- <property>
- <name>hbase.master</name>
- <value>local</value>
- <description>The host and port that the HBase master runs at.
- A value of 'local' runs the master and a regionserver in
- a single process.
- </description>
- </property>
<property>
<name>hbase.rootdir</name>
<value>file:///tmp/hbase-${user.name}/hbase</value>
@@ -38,6 +30,14 @@
E.g: hdfs://NAMENODE_SERVER:PORT/HBASE_ROOTDIR
</description>
</property>
+ <property>
+ <name>hbase.master</name>
+ <value>local</value>
+ <description>The host and port that the HBase master runs at.
+ A value of 'local' runs the master and a regionserver in
+ a single process.
+ </description>
+ </property>
<property>
<name>hbase.master.info.port</name>
<value>60010</value>
@@ -51,6 +51,21 @@
<description>The address for the hbase master web UI
</description>
</property>
+ <property>
+ <name>hbase.master.meta.thread.rescanfrequency</name>
+ <value>60000</value>
+ <description>How long the HMaster sleeps (in milliseconds) between scans of
+ the root and meta tables.
+ </description>
+ </property>
+ <property>
+ <name>hbase.master.lease.period</name>
+ <value>120000</value>
+ <description>HMaster server lease period in milliseconds. Default is
+ 120 seconds. Region servers must report in within this period else
+ they are considered dead. On loaded cluster, may need to up this
+ period.</description>
+ </property>
<property>
<name>hbase.regionserver</name>
<value>0.0.0.0:60020</value>
@@ -110,28 +125,6 @@
calls of next may take longer and longer times when the cache is empty.
</description>
</property>
- <property>
- <name>hbase.master.meta.thread.rescanfrequency</name>
- <value>60000</value>
- <description>How long the HMaster sleeps (in milliseconds) between scans of
- the root and meta tables.
- </description>
- </property>
- <property>
- <name>hbase.master.lease.period</name>
- <value>120000</value>
- <description>HMaster server lease period in milliseconds. Default is
- 120 seconds. Region servers must report in within this period else
- they are considered dead. On loaded cluster, may need to up this
- period.</description>
- </property>
- <property>
- <name>hbase.hbasemaster.maxregionopen</name>
- <value>120000</value>
- <description>Period to wait for a region open. If regionserver
- takes longer than this interval, assign to a new regionserver.
- </description>
- </property>
<property>
<name>hbase.regionserver.lease.period</name>
<value>60000</value>
@@ -139,13 +132,6 @@
60 seconds. Clients must report in within this period else they are
considered dead.</description>
</property>
- <property>
- <name>hbase.server.thread.wakefrequency</name>
- <value>10000</value>
- <description>Time to sleep in between searches for work (in milliseconds).
- Used as sleep interval by service threads such as META scanner and log roller.
- </description>
- </property>
<property>
<name>hbase.regionserver.handler.count</name>
<value>10</value>
@@ -189,6 +175,50 @@
Default: 30 minutes (in miliseconds)
</description>
</property>
+ <property>
+ <name>hbase.regionserver.thread.splitcompactcheckfrequency</name>
+ <value>20000</value>
+ <description>How often a region server runs the split/compaction check.
+ </description>
+ </property>
+ <property>
+ <name>hbase.regionserver.nbreservationblocks</name>
+ <value>4</value>
+ <description>The number of reservation blocks which are used to prevent
+ unstable region servers caused by an OOME.
+ </description>
+ </property>
+ <property>
+ <name>hbase.regionserver.globalMemcacheLimit</name>
+ <value>536870912</value>
+ <description>Maximum size of all memcaches in a region server before new
+ updates are blocked and flushes are forced. Defaults to 512MB.
+ </description>
+ </property>
+ <property>
+ <name>hbase.regionserver.globalMemcacheLimitlowMark</name>
+ <value>256435456</value>
+ <description>When memcaches are being forced to flush to make room in
+ memory, keep flushing until we hit this mark. Defaults to 256MB. Setting
+ this value equal to hbase.regionserver.globalmemcachelimit causes the
+ minimum possible flushing to occur when updates are blocked due to
+ memcache limiting.
+ </description>
+ </property>
+ <property>
+ <name>hbase.hbasemaster.maxregionopen</name>
+ <value>120000</value>
+ <description>Period to wait for a region open. If regionserver
+ takes longer than this interval, assign to a new regionserver.
+ </description>
+ </property>
+ <property>
+ <name>hbase.server.thread.wakefrequency</name>
+ <value>10000</value>
+ <description>Time to sleep in between searches for work (in milliseconds).
+ Used as sleep interval by service threads such as META scanner and log roller.
+ </description>
+ </property>
<property>
<name>hbase.hregion.memcache.flush.size</name>
<value>67108864</value>
@@ -234,12 +264,6 @@
If too large, clients timeout during compaction.
</description>
</property>
- <property>
- <name>hbase.regionserver.thread.splitcompactcheckfrequency</name>
- <value>20000</value>
- <description>How often a region server runs the split/compaction check.
- </description>
- </property>
<property>
<name>hbase.hstore.compaction.max</name>
<value>10</value>
@@ -254,10 +278,10 @@
</description>
</property>
<property>
- <name>hbase.regionserver.nbreservationblocks</name>
- <value>4</value>
- <description>The number of reservation blocks which are used to prevent
- unstable region servers caused by an OOME.
+ <name>hbase.regions.slop</name>
+ <value>0.1</value>
+ <description>Rebalance if regionserver has average + (average * slop) regions.
+ Default is 10% slop.
</description>
</property>
<property>
@@ -283,23 +307,6 @@
<description>The size of each block in any block caches.
</description>
</property>
- <property>
- <name>hbase.regionserver.globalMemcacheLimit</name>
- <value>536870912</value>
- <description>Maximum size of all memcaches in a region server before new
- updates are blocked and flushes are forced. Defaults to 512MB.
- </description>
- </property>
- <property>
- <name>hbase.regionserver.globalMemcacheLimitlowMark</name>
- <value>256435456</value>
- <description>When memcaches are being forced to flush to make room in
- memory, keep flushing until we hit this mark. Defaults to 256MB. Setting
- this value equal to hbase.regionserver.globalmemcachelimit causes the
- minimum possible flushing to occur when updates are blocked due to
- memcache limiting.
- </description>
- </property>
<property>
<name>hbase.hash.type</name>
<value>murmur</value>
diff --git a/src/java/org/apache/hadoop/hbase/master/RegionManager.java b/src/java/org/apache/hadoop/hbase/master/RegionManager.java
index 9bc808a239e1..cc3cc5904c33 100644
--- a/src/java/org/apache/hadoop/hbase/master/RegionManager.java
+++ b/src/java/org/apache/hadoop/hbase/master/RegionManager.java
@@ -118,15 +118,17 @@ class RegionManager implements HConstants {
private final int maxAssignInOneGo;
private final HMaster master;
-
private final RegionHistorian historian;
+ private final float slop;
RegionManager(HMaster master) {
this.master = master;
this.historian = RegionHistorian.getInstance();
this.maxAssignInOneGo = this.master.getConfiguration().
getInt("hbase.regions.percheckin", 10);
-
+ this.slop = this.master.getConfiguration().getFloat("hbase.regions.slop",
+ (float)0.1);
+
// The root region
rootScannerThread = new RootScanner(master, this);
@@ -183,13 +185,18 @@ void assignRegions(HServerInfo info, String serverName,
// We only do load balancing once all regions are assigned.
// This prevents churn while the cluster is starting up.
double avgLoad = master.serverManager.getAverageLoad();
- if (avgLoad > 2.0 && thisServersLoad.getNumberOfRegions() > avgLoad) {
+ double avgLoadWithSlop = avgLoad +
+ ((this.slop != 0)? avgLoad * this.slop: avgLoad);
+ if (avgLoad > 2.0 &&
+ thisServersLoad.getNumberOfRegions() > avgLoadWithSlop) {
if (LOG.isDebugEnabled()) {
- LOG.debug("Server " + serverName + " is overloaded. Server load: " +
- thisServersLoad.getNumberOfRegions() + " avg: " + avgLoad);
+ LOG.debug("Server " + serverName +
+ " is overloaded. Server load: " +
+ thisServersLoad.getNumberOfRegions() + " avg: " + avgLoad +
+ ", slop: " + this.slop);
}
- unassignSomeRegions(thisServersLoad, avgLoad, mostLoadedRegions,
- returnMsgs);
+ unassignSomeRegions(thisServersLoad, avgLoad, mostLoadedRegions,
+ returnMsgs);
}
}
} else {
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
index b5e1249ce031..1d75d9a2eda5 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
@@ -36,6 +36,10 @@
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.io.BlockFSInputStream;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
@@ -46,14 +50,8 @@
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.onelab.filter.BloomFilter;
-import org.onelab.filter.HashFunction;
import org.onelab.filter.Key;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HStoreKey;
-
/**
* A HStore data file. HStores usually have one or more of these files. They
* are produced by flushing the memcache to disk.
|
eeca7f092acfcc681155e6bfd5c77611df15ebe0
|
intellij-community
|
report expressions like "!b != c" in "Double- negation" inspection and add test--
|
a
|
https://github.com/JetBrains/intellij-community
|
diff --git a/plugins/InspectionGadgets/src/com/siyeh/ig/controlflow/DoubleNegationInspection.java b/plugins/InspectionGadgets/src/com/siyeh/ig/controlflow/DoubleNegationInspection.java
index 4c8817a705c56..8627f9086fed5 100644
--- a/plugins/InspectionGadgets/src/com/siyeh/ig/controlflow/DoubleNegationInspection.java
+++ b/plugins/InspectionGadgets/src/com/siyeh/ig/controlflow/DoubleNegationInspection.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2006-2007 Bas Leijdekkers
+ * Copyright 2006-2011 Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,12 +18,13 @@
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
-import com.intellij.psi.tree.IElementType;
import com.intellij.util.IncorrectOperationException;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
+import com.siyeh.ig.psiutils.BoolUtils;
+import com.siyeh.ig.psiutils.ParenthesesUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@@ -36,8 +37,7 @@ public String getDisplayName() {
@NotNull
protected String buildErrorString(Object... infos) {
- return InspectionGadgetsBundle.message(
- "double.negation.problem.descriptor");
+ return InspectionGadgetsBundle.message("double.negation.problem.descriptor");
}
@Nullable
@@ -52,44 +52,21 @@ public String getName() {
return InspectionGadgetsBundle.message("double.negation.quickfix");
}
- protected void doFix(Project project, ProblemDescriptor descriptor)
- throws IncorrectOperationException {
- final PsiPrefixExpression expression =
- (PsiPrefixExpression)descriptor.getPsiElement();
- PsiExpression operand = expression.getOperand();
- while (operand instanceof PsiParenthesizedExpression) {
- final PsiParenthesizedExpression parenthesizedExpression =
- (PsiParenthesizedExpression)operand;
- operand = parenthesizedExpression.getExpression();
- }
- if (operand instanceof PsiPrefixExpression) {
- final PsiPrefixExpression prefixExpression =
- (PsiPrefixExpression)operand;
- final PsiExpression innerOperand = prefixExpression.getOperand();
- if (innerOperand == null) {
- return;
- }
- expression.replace(innerOperand);
- }
- else if (operand instanceof PsiBinaryExpression) {
- final PsiBinaryExpression binaryExpression =
- (PsiBinaryExpression)operand;
+ protected void doFix(Project project, ProblemDescriptor descriptor) throws IncorrectOperationException {
+ final PsiElement expression = descriptor.getPsiElement();
+ if (expression instanceof PsiPrefixExpression) {
+ final PsiPrefixExpression prefixExpression = (PsiPrefixExpression)expression;
+ final PsiExpression operand = prefixExpression.getOperand();
+ replaceExpression(prefixExpression, BoolUtils.getNegatedExpressionText(operand));
+ } else if (expression instanceof PsiBinaryExpression) {
+ final PsiBinaryExpression binaryExpression = (PsiBinaryExpression)expression;
+ final StringBuilder newExpressionText = new StringBuilder();
final PsiExpression lhs = binaryExpression.getLOperand();
- final String lhsText = lhs.getText();
- final StringBuilder builder =
- new StringBuilder(lhsText);
- builder.append("==");
+ newExpressionText.append(BoolUtils.getNegatedExpressionText(lhs));
+ newExpressionText.append("==");
final PsiExpression rhs = binaryExpression.getROperand();
- if (rhs != null) {
- final String rhsText = rhs.getText();
- builder.append(rhsText);
- }
- final PsiManager manager = binaryExpression.getManager();
- final PsiElementFactory factory = JavaPsiFacade.getInstance(manager.getProject()).getElementFactory();
- final PsiExpression newExpression =
- factory.createExpressionFromText(builder.toString(),
- binaryExpression);
- expression.replace(newExpression);
+ newExpressionText.append(BoolUtils.getNegatedExpressionText(rhs));
+ replaceExpression(binaryExpression, newExpressionText.toString());
}
}
}
@@ -103,39 +80,43 @@ private static class DoubleNegationVisitor extends BaseInspectionVisitor {
@Override
public void visitPrefixExpression(PsiPrefixExpression expression) {
super.visitPrefixExpression(expression);
- final IElementType tokenType = expression.getOperationTokenType();
- if (!JavaTokenType.EXCL.equals(tokenType)) {
+ if (!isNegation(expression)) {
+ return;
+ }
+ final PsiExpression operand = expression.getOperand();
+ if (!isNegation(operand)) {
return;
}
- checkParent(expression);
+ registerError(expression);
}
@Override
public void visitBinaryExpression(PsiBinaryExpression expression) {
super.visitBinaryExpression(expression);
- final IElementType tokenType = expression.getOperationTokenType();
- if (!JavaTokenType.NE.equals(tokenType)) {
+ if (!isNegation(expression)) {
return;
}
- checkParent(expression);
- }
-
- private void checkParent(PsiExpression expression) {
- PsiElement parent = expression.getParent();
- while (parent instanceof PsiParenthesizedExpression) {
- parent = parent.getParent();
- }
- if (!(parent instanceof PsiPrefixExpression)) {
+ final PsiExpression lhs = expression.getLOperand();
+ final PsiExpression rhs = expression.getROperand();
+ if (rhs == null || !isNegation(lhs) && !isNegation(rhs)) {
return;
}
- final PsiPrefixExpression prefixExpression =
- (PsiPrefixExpression)parent;
- final IElementType parentTokenType =
- prefixExpression.getOperationTokenType();
- if (!JavaTokenType.EXCL.equals(parentTokenType)) {
- return;
- }
- registerError(prefixExpression);
+ registerError(expression);
+ }
+
+ private static boolean isNegation(PsiExpression expression) {
+ expression = ParenthesesUtils.stripParentheses(expression);
+ if (expression instanceof PsiPrefixExpression) return isNegation((PsiPrefixExpression)expression);
+ if (expression instanceof PsiBinaryExpression) return isNegation((PsiBinaryExpression)expression);
+ return false;
+ }
+
+ private static boolean isNegation(PsiBinaryExpression expression) {
+ return JavaTokenType.NE.equals(expression.getOperationTokenType());
+ }
+
+ private static boolean isNegation(PsiPrefixExpression expression) {
+ return JavaTokenType.EXCL.equals(expression.getOperationTokenType());
}
}
}
\ No newline at end of file
diff --git a/plugins/InspectionGadgets/test/com/siyeh/igtest/controlflow/double_negation/DoubleNegation.java b/plugins/InspectionGadgets/test/com/siyeh/igtest/controlflow/double_negation/DoubleNegation.java
new file mode 100644
index 0000000000000..68660aeff73d2
--- /dev/null
+++ b/plugins/InspectionGadgets/test/com/siyeh/igtest/controlflow/double_negation/DoubleNegation.java
@@ -0,0 +1,11 @@
+package com.siyeh.igtest.controlflow.double_negation;
+
+public class DoubleNegation {
+
+ void negative(boolean b1, boolean b2, boolean b3) {
+ boolean r1 = !(b1 != b2);
+ boolean r2 = !!b1;
+ boolean r3 = !b1 != b2;
+ boolean r4 = (b1 != (b2 != b3));
+ }
+}
diff --git a/plugins/InspectionGadgets/test/com/siyeh/igtest/controlflow/double_negation/expected.xml b/plugins/InspectionGadgets/test/com/siyeh/igtest/controlflow/double_negation/expected.xml
new file mode 100644
index 0000000000000..64a74d15ddbcc
--- /dev/null
+++ b/plugins/InspectionGadgets/test/com/siyeh/igtest/controlflow/double_negation/expected.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<problems>
+ <problem>
+ <file>DoubleNegation.java</file>
+ <line>6</line>
+ <problem_class severity="WARNING" attribute_key="WARNING_ATTRIBUTES">Double negation</problem_class>
+ <description>Double negation in <code>!(b1 != b2)</code> #loc</description>
+ </problem>
+
+ <problem>
+ <file>DoubleNegation.java</file>
+ <line>7</line>
+ <problem_class severity="WARNING" attribute_key="WARNING_ATTRIBUTES">Double negation</problem_class>
+ <description>Double negation in <code>!!b1</code> #loc</description>
+ </problem>
+
+ <problem>
+ <file>DoubleNegation.java</file>
+ <line>8</line>
+ <problem_class severity="WARNING" attribute_key="WARNING_ATTRIBUTES">Double negation</problem_class>
+ <description>Double negation in <code>!b1 != b2</code> #loc</description>
+ </problem>
+
+ <problem>
+ <file>DoubleNegation.java</file>
+ <line>9</line>
+ <problem_class severity="WARNING" attribute_key="WARNING_ATTRIBUTES">Double negation</problem_class>
+ <description>Double negation in <code>b1 != (b2 != b3)</code> #loc</description>
+ </problem>
+</problems>
\ No newline at end of file
diff --git a/plugins/InspectionGadgets/testsrc/com/siyeh/ig/controlflow/DoubleNegationInspectionTest.java b/plugins/InspectionGadgets/testsrc/com/siyeh/ig/controlflow/DoubleNegationInspectionTest.java
new file mode 100644
index 0000000000000..18039b1c76c50
--- /dev/null
+++ b/plugins/InspectionGadgets/testsrc/com/siyeh/ig/controlflow/DoubleNegationInspectionTest.java
@@ -0,0 +1,11 @@
+package com.siyeh.ig.controlflow;
+
+import com.siyeh.ig.IGInspectionTestCase;
+
+public class DoubleNegationInspectionTest
+ extends IGInspectionTestCase {
+
+ public void test() throws Exception {
+ doTest("com/siyeh/igtest/controlflow/double_negation", new DoubleNegationInspection());
+ }
+}
\ No newline at end of file
|
950786a8cc1a83dde6e590f59958f9ada8699288
|
spring-framework
|
Add Jackson2ObjectMapperBeanFactory--Issue: SPR-9739-
|
a
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-web/src/main/java/org/springframework/http/converter/json/Jackson2ObjectMapperFactoryBean.java b/spring-web/src/main/java/org/springframework/http/converter/json/Jackson2ObjectMapperFactoryBean.java
new file mode 100644
index 000000000000..19582f4171d2
--- /dev/null
+++ b/spring-web/src/main/java/org/springframework/http/converter/json/Jackson2ObjectMapperFactoryBean.java
@@ -0,0 +1,327 @@
+/*
+ * Copyright 2002-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.http.converter.json;
+
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.springframework.beans.FatalBeanException;
+import org.springframework.beans.factory.FactoryBean;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.util.Assert;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.AnnotationIntrospector;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.JsonDeserializer;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.MapperFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+
+/**
+ * A FactoryBean for creating a Jackson {@link ObjectMapper} with setters to
+ * enable or disable Jackson features from within XML configuration.
+ *
+ * <p>Example usage with
+ * {@link org.springframework.http.converter.json.MappingJackson2HttpMessageConverter}:
+ *
+ * <pre>
+ * <bean class="org.springframework.http.converter.json.MappingJackson2HttpMessageConverter">
+ * <property name="objectMapper">
+ * <bean class="org.springframework.web.context.support.Jackson2ObjectMapperFactoryBean"
+ * p:autoDetectFields="false"
+ * p:autoDetectGettersSetters="false"
+ * p:annotationIntrospector-ref="jaxbAnnotationIntrospector" />
+ * </property>
+ * </bean>
+ * </pre>
+ *
+ * <p>Example usage with {@link org.springframework.web.servlet.view.json.MappingJackson2JsonView}:
+ *
+ * <pre>
+ * <bean class="org.springframework.web.servlet.view.json.MappingJackson2JsonView">
+ * <property name="objectMapper">
+ * <bean class="org.springframework.web.context.support.Jackson2ObjectMapperFactoryBean"
+ * p:failOnEmptyBeans="false"
+ * p:indentOutput="true">
+ * <property name="serializers">
+ * <array>
+ * <bean class="org.mycompany.MyCustomSerializer" />
+ * </array>
+ * </property>
+ * </bean>
+ * </property>
+ * </bean>
+ * </pre>
+ *
+ * <p>In case there are no specific setters provided (for some rarely used
+ * options), you can still use the more general methods
+ * {@link #setFeaturesToEnable(Object[])} and {@link #setFeaturesToDisable(Object[])}.
+ *
+ * <pre>
+ * <bean class="org.springframework.web.context.support.Jackson2ObjectMapperFactoryBean">
+ * <property name="featuresToEnable">
+ * <array>
+ * <util:constant static-field="com.fasterxml.jackson.databind.SerializationFeature$WRAP_ROOT_VALUE"/>
+ * <util:constant static-field="com.fasterxml.jackson.databind.SerializationFeature$CLOSE_CLOSEABLE"/>
+ * </array>
+ * </property>
+ * <property name="featuresToDisable">
+ * <array>
+ * <util:constant static-field="com.fasterxml.jackson.databind.MapperFeature$USE_ANNOTATIONS"/>
+ * </array>
+ * </property>
+ * </bean>
+ * </pre>
+ *
+ * <p>Note: This BeanFctory is singleton, so if you need more than one you'll need
+ * to configure multiple instances.
+ *
+ * @author <a href="mailto:[email protected]">Dmitry Katsubo</a>
+ * @author Rossen Stoyanchev
+ *
+ * @since 3.2
+ */
+public class Jackson2ObjectMapperFactoryBean implements FactoryBean<ObjectMapper>, InitializingBean {
+
+ private ObjectMapper objectMapper;
+
+ private Map<Object, Boolean> features = new HashMap<Object, Boolean>();
+
+ private DateFormat dateFormat;
+
+ private AnnotationIntrospector annotationIntrospector;
+
+ private final Map<Class<?>, JsonSerializer<?>> serializers = new LinkedHashMap<Class<?>, JsonSerializer<?>>();
+
+ private final Map<Class<?>, JsonDeserializer<?>> deserializers = new LinkedHashMap<Class<?>, JsonDeserializer<?>>();
+
+
+ /**
+ * Set the ObjectMapper instance to use. If not set, the ObjectMapper will
+ * be created using its default constructor.
+ */
+ public void setObjectMapper(ObjectMapper objectMapper) {
+ this.objectMapper = objectMapper;
+ }
+
+ /**
+ * Define the format for date/time with the given {@link DateFormat}.
+ * @see #setSimpleDateFormat(String)
+ */
+ public void setDateFormat(DateFormat dateFormat) {
+ this.dateFormat = dateFormat;
+ }
+
+ /**
+ * Define the date/time format with a {@link SimpleDateFormat}.
+ * @see #setDateFormat(DateFormat)
+ */
+ public void setSimpleDateFormat(String format) {
+ this.dateFormat = new SimpleDateFormat(format);
+ }
+
+ /**
+ * Set the {@link AnnotationIntrospector} for both serialization and
+ * deserialization.
+ */
+ public void setAnnotationIntrospector(AnnotationIntrospector annotationIntrospector) {
+ this.annotationIntrospector = annotationIntrospector;
+ }
+
+ /**
+ * Configure custom serializers. Each serializer is registered for the type
+ * returned by {@link JsonSerializer#handledType()}, which must not be
+ * {@code null}.
+ * @see #setSerializersByType(Map)
+ */
+ public void setSerializers(JsonSerializer<?>... serializers) {
+ if (serializers != null) {
+ for (JsonSerializer<?> serializer : serializers) {
+ Class<?> handledType = serializer.handledType();
+ Assert.isTrue(handledType != null && handledType != Object.class,
+ "Unknown handled type in " + serializer.getClass().getName());
+ this.serializers.put(serializer.handledType(), serializer);
+ }
+ }
+ }
+
+ /**
+ * Configure custom serializers for the given types.
+ * @see #setSerializers(JsonSerializer...)
+ */
+ public void setSerializersByType(Map<Class<?>, JsonSerializer<?>> serializers) {
+ if (serializers != null) {
+ this.serializers.putAll(serializers);
+ }
+ }
+
+ /**
+ * Configure custom deserializers for the given types.
+ */
+ public void setDeserializersByType(Map<Class<?>, JsonDeserializer<?>> deserializers) {
+ if (deserializers != null) {
+ this.deserializers.putAll(deserializers);
+ }
+ }
+
+ /**
+ * Shortcut for {@link MapperFeature#AUTO_DETECT_FIELDS} option.
+ */
+ public void setAutoDetectFields(boolean autoDetectFields) {
+ this.features.put(MapperFeature.AUTO_DETECT_FIELDS, autoDetectFields);
+ }
+
+ /**
+ * Shortcut for {@link MapperFeature#AUTO_DETECT_SETTERS}/
+ * {@link MapperFeature#AUTO_DETECT_GETTERS} option.
+ */
+ public void setAutoDetectGettersSetters(boolean autoDetectGettersSetters) {
+ this.features.put(MapperFeature.AUTO_DETECT_SETTERS, autoDetectGettersSetters);
+ this.features.put(MapperFeature.AUTO_DETECT_GETTERS, autoDetectGettersSetters);
+ }
+
+ /**
+ * Shortcut for {@link SerializationFeature#FAIL_ON_EMPTY_BEANS} option.
+ */
+ public void setFailOnEmptyBeans(boolean failOnEmptyBeans) {
+ this.features.put(SerializationFeature.FAIL_ON_EMPTY_BEANS, failOnEmptyBeans);
+ }
+
+ /**
+ * Shortcut for {@link SerializationFeature#INDENT_OUTPUT} option.
+ */
+ public void setIndentOutput(boolean indentOutput) {
+ this.features.put(SerializationFeature.INDENT_OUTPUT, indentOutput);
+ }
+
+ /**
+ * Specify features to enable.
+ *
+ * @see MapperFeature
+ * @see SerializationFeature
+ * @see DeserializationFeature
+ * @see JsonParser.Feature
+ * @see JsonGenerator.Feature
+ */
+ public void setFeaturesToEnable(Object... featuresToEnable) {
+ if (featuresToEnable != null) {
+ for (Object feature : featuresToEnable) {
+ this.features.put(feature, Boolean.TRUE);
+ }
+ }
+ }
+
+ /**
+ * Specify features to disable.
+ *
+ * @see MapperFeature
+ * @see SerializationFeature
+ * @see DeserializationFeature
+ * @see JsonParser.Feature
+ * @see JsonGenerator.Feature
+ */
+ public void setFeaturesToDisable(Object... featuresToDisable) {
+ if (featuresToDisable != null) {
+ for (Object feature : featuresToDisable) {
+ this.features.put(feature, Boolean.FALSE);
+ }
+ }
+ }
+
+ public void afterPropertiesSet() throws FatalBeanException {
+ if (this.objectMapper == null) {
+ this.objectMapper = new ObjectMapper();
+ }
+
+ if (this.dateFormat != null) {
+ this.objectMapper.setDateFormat(this.dateFormat);
+ }
+
+ if (this.serializers != null || this.deserializers != null) {
+ SimpleModule module = new SimpleModule();
+ addSerializers(module);
+ addDeserializers(module);
+ this.objectMapper.registerModule(module);
+ }
+
+ if (this.annotationIntrospector != null) {
+ this.objectMapper.setAnnotationIntrospector(this.annotationIntrospector);
+ }
+
+ for (Object feature : this.features.keySet()) {
+ configureFeature(feature, this.features.get(feature));
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private <T> void addSerializers(SimpleModule module) {
+ for (Class<?> type : this.serializers.keySet()) {
+ module.addSerializer((Class<? extends T>) type, (JsonSerializer<T>) this.serializers.get(type));
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private <T> void addDeserializers(SimpleModule module) {
+ for (Class<?> type : this.deserializers.keySet()) {
+ module.addDeserializer((Class<T>) type, (JsonDeserializer<? extends T>) this.deserializers.get(type));
+ }
+ }
+
+ private void configureFeature(Object feature, boolean enabled) {
+ if (feature instanceof MapperFeature) {
+ this.objectMapper.configure((MapperFeature) feature, enabled);
+ }
+ else if (feature instanceof DeserializationFeature) {
+ this.objectMapper.configure((DeserializationFeature) feature, enabled);
+ }
+ else if (feature instanceof SerializationFeature) {
+ this.objectMapper.configure((SerializationFeature) feature, enabled);
+ }
+ else if (feature instanceof JsonParser.Feature) {
+ this.objectMapper.configure((JsonParser.Feature) feature, enabled);
+ }
+ else if (feature instanceof JsonGenerator.Feature) {
+ this.objectMapper.configure((JsonGenerator.Feature) feature, enabled);
+ }
+ else {
+ throw new FatalBeanException("Unknown feature class " + feature.getClass().getName());
+ }
+ }
+
+ /**
+ * Return the singleton ObjectMapper.
+ */
+ public ObjectMapper getObject() {
+ return this.objectMapper;
+ }
+
+ public Class<?> getObjectType() {
+ return ObjectMapper.class;
+ }
+
+ public boolean isSingleton() {
+ return true;
+ }
+
+}
diff --git a/spring-web/src/main/java/org/springframework/http/converter/json/JacksonObjectMapperFactoryBean.java b/spring-web/src/main/java/org/springframework/http/converter/json/JacksonObjectMapperFactoryBean.java
index 765e174255bd..db8a459a6a40 100644
--- a/spring-web/src/main/java/org/springframework/http/converter/json/JacksonObjectMapperFactoryBean.java
+++ b/spring-web/src/main/java/org/springframework/http/converter/json/JacksonObjectMapperFactoryBean.java
@@ -35,7 +35,7 @@
* A FactoryBean for creating a Jackson {@link ObjectMapper} with setters to
* enable or disable Jackson features from within XML configuration.
*
- * <p>Example usage with MappingJacksonHttpMessageConverter:</p>
+ * <p>Example usage with MappingJacksonHttpMessageConverter:
* <pre>
* <bean class="org.springframework.http.converter.json.MappingJacksonHttpMessageConverter">
* <property name="objectMapper">
@@ -47,7 +47,7 @@
* </bean>
* </pre>
*
- * <p>Example usage with MappingJacksonJsonView:</p>
+ * <p>Example usage with MappingJacksonJsonView:
* <pre>
* <bean class="org.springframework.web.servlet.view.json.MappingJacksonJsonView">
* <property name="objectMapper">
@@ -89,10 +89,10 @@ public class JacksonObjectMapperFactoryBean implements FactoryBean<ObjectMapper>
private Map<Object, Boolean> features = new HashMap<Object, Boolean>();
- private AnnotationIntrospector annotationIntrospector;
-
private DateFormat dateFormat;
+ private AnnotationIntrospector annotationIntrospector;
+
/**
* Set the ObjectMapper instance to use.
@@ -103,16 +103,15 @@ public void setObjectMapper(ObjectMapper objectMapper) {
}
/**
- * Define annotationIntrospector for
- * {@link SerializationConfig#setAnnotationIntrospector(AnnotationIntrospector)}.
+ * Define the format for date/time with the given {@link DateFormat}.
+ * @see #setSimpleDateFormat(String)
*/
- public void setAnnotationIntrospector(AnnotationIntrospector annotationIntrospector) {
- this.annotationIntrospector = annotationIntrospector;
+ public void setDateFormat(DateFormat dateFormat) {
+ this.dateFormat = dateFormat;
}
/**
- * Define the date/time format with the given string, which is in turn used
- * to create a {@link SimpleDateFormat}.
+ * Define the date/time format with a {@link SimpleDateFormat}.
* @see #setDateFormat(DateFormat)
*/
public void setSimpleDateFormat(String format) {
@@ -120,11 +119,12 @@ public void setSimpleDateFormat(String format) {
}
/**
- * Define the format for date/time with the given {@link DateFormat} instance.
- * @see #setSimpleDateFormat(String)
+ * Set the {@link AnnotationIntrospector} for serialization and deserialization.
+ * @see SerializationConfig#setAnnotationIntrospector(AnnotationIntrospector)
+ * @see DeserializationConfig#setAnnotationIntrospector(AnnotationIntrospector)
*/
- public void setDateFormat(DateFormat dateFormat) {
- this.dateFormat = dateFormat;
+ public void setAnnotationIntrospector(AnnotationIntrospector annotationIntrospector) {
+ this.annotationIntrospector = annotationIntrospector;
}
/**
@@ -161,6 +161,7 @@ public void setIndentOutput(boolean indentOutput) {
/**
* Specify features to enable.
+ *
* @see SerializationConfig.Feature
* @see DeserializationConfig.Feature
* @see JsonParser.Feature
@@ -176,6 +177,7 @@ public void setFeaturesToEnable(Object[] featuresToEnable) {
/**
* Specify features to disable.
+ *
* @see SerializationConfig.Feature
* @see DeserializationConfig.Feature
* @see JsonParser.Feature
@@ -189,7 +191,6 @@ public void setFeaturesToDisable(Object[] featuresToDisable) {
}
}
-
public void afterPropertiesSet() {
if (this.objectMapper == null) {
this.objectMapper = new ObjectMapper();
@@ -203,11 +204,11 @@ public void afterPropertiesSet() {
this.objectMapper.getSerializationConfig().setDateFormat(this.dateFormat);
}
for (Map.Entry<Object, Boolean> entry : this.features.entrySet()) {
- setFeatureEnabled(entry.getKey(), entry.getValue());
+ configureFeature(entry.getKey(), entry.getValue().booleanValue());
}
}
- private void setFeatureEnabled(Object feature, boolean enabled) {
+ private void configureFeature(Object feature, boolean enabled) {
if (feature instanceof DeserializationConfig.Feature) {
this.objectMapper.configure((DeserializationConfig.Feature) feature, enabled);
}
@@ -225,7 +226,9 @@ else if (feature instanceof JsonGenerator.Feature) {
}
}
-
+ /**
+ * Return the singleton ObjectMapper.
+ */
public ObjectMapper getObject() {
return this.objectMapper;
}
diff --git a/spring-web/src/test/java/org/springframework/http/converter/json/Jackson2ObjectMapperFactoryBeanTests.java b/spring-web/src/test/java/org/springframework/http/converter/json/Jackson2ObjectMapperFactoryBeanTests.java
new file mode 100644
index 000000000000..bc52e4c40227
--- /dev/null
+++ b/spring-web/src/test/java/org/springframework/http/converter/json/Jackson2ObjectMapperFactoryBeanTests.java
@@ -0,0 +1,191 @@
+/*
+ * Copyright 2002-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.http.converter.json;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.beans.DirectFieldAccessor;
+import org.springframework.beans.FatalBeanException;
+import org.springframework.http.converter.json.Jackson2ObjectMapperFactoryBean;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.JsonDeserializer;
+import com.fasterxml.jackson.databind.MapperFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import com.fasterxml.jackson.databind.cfg.DeserializerFactoryConfig;
+import com.fasterxml.jackson.databind.cfg.SerializerFactoryConfig;
+import com.fasterxml.jackson.databind.deser.std.DateDeserializers.DateDeserializer;
+import com.fasterxml.jackson.databind.introspect.NopAnnotationIntrospector;
+import com.fasterxml.jackson.databind.ser.std.StdJdkSerializers.ClassSerializer;
+
+/**
+ * Test cases for {@link Jackson2ObjectMapperFactoryBean} class.
+ *
+ * @author <a href="mailto:[email protected]">Dmitry Katsubo</a>
+ */
+public class Jackson2ObjectMapperFactoryBeanTests {
+
+ private static final String DATE_FORMAT = "yyyy-MM-dd";
+
+ private Jackson2ObjectMapperFactoryBean factory;
+
+ @Before
+ public void setUp() {
+ factory = new Jackson2ObjectMapperFactoryBean();
+ }
+
+ @Test
+ public void testSetFeaturesToEnableEmpty() {
+ this.factory.setFeaturesToEnable(new Object[0]);
+ this.factory.setFeaturesToDisable(new Object[0]);
+ }
+
+ @Test(expected = FatalBeanException.class)
+ public void testUnknownFeature() {
+ this.factory.setFeaturesToEnable(new Object[] { Boolean.TRUE });
+ this.factory.afterPropertiesSet();
+ }
+
+ @Test
+ public void testBooleanSetters() {
+ this.factory.setAutoDetectFields(false);
+ this.factory.setAutoDetectGettersSetters(false);
+ this.factory.setFailOnEmptyBeans(false);
+ this.factory.setIndentOutput(true);
+ this.factory.afterPropertiesSet();
+
+ ObjectMapper objectMapper = this.factory.getObject();
+
+ assertFalse(objectMapper.getSerializationConfig().isEnabled(MapperFeature.AUTO_DETECT_FIELDS));
+ assertFalse(objectMapper.getDeserializationConfig().isEnabled(MapperFeature.AUTO_DETECT_FIELDS));
+ assertFalse(objectMapper.getSerializationConfig().isEnabled(MapperFeature.AUTO_DETECT_GETTERS));
+ assertFalse(objectMapper.getDeserializationConfig().isEnabled(MapperFeature.AUTO_DETECT_SETTERS));
+ assertFalse(objectMapper.getSerializationConfig().isEnabled(SerializationFeature.FAIL_ON_EMPTY_BEANS));
+ assertTrue(objectMapper.getSerializationConfig().isEnabled(SerializationFeature.INDENT_OUTPUT));
+ }
+
+ @Test
+ public void testDateTimeFormatSetter() {
+ SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT);
+
+ this.factory.setDateFormat(dateFormat);
+ this.factory.afterPropertiesSet();
+
+ assertEquals(dateFormat, this.factory.getObject().getSerializationConfig().getDateFormat());
+ assertEquals(dateFormat, this.factory.getObject().getDeserializationConfig().getDateFormat());
+ }
+
+ @Test
+ public void testSimpleDateFormatStringSetter() {
+ SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT);
+
+ this.factory.setSimpleDateFormat(DATE_FORMAT);
+ this.factory.afterPropertiesSet();
+
+ assertEquals(dateFormat, this.factory.getObject().getSerializationConfig().getDateFormat());
+ assertEquals(dateFormat, this.factory.getObject().getDeserializationConfig().getDateFormat());
+ }
+
+ @Test
+ public void testSimpleSetup() {
+ this.factory.afterPropertiesSet();
+
+ assertNotNull(this.factory.getObject());
+ assertTrue(this.factory.isSingleton());
+ assertEquals(ObjectMapper.class, this.factory.getObjectType());
+ }
+
+ /**
+ * TODO: Remove use of {@link DirectFieldAccessor} with getters.
+ * See <a href="https://github.com/FasterXML/jackson-databind/issues/65">issue#65</a>.
+ */
+ private static final SerializerFactoryConfig getSerializerFactoryConfig(ObjectMapper objectMapper) {
+ Object factoryProp = new DirectFieldAccessor(objectMapper).getPropertyValue("_serializerFactory");
+ return (SerializerFactoryConfig) new DirectFieldAccessor(factoryProp).getPropertyValue("_factoryConfig");
+ }
+
+ private static final DeserializerFactoryConfig getDeserializerFactoryConfig(ObjectMapper objectMapper) {
+ Object contextProp = new DirectFieldAccessor(objectMapper).getPropertyValue("_deserializationContext");
+ Object factoryProp = new DirectFieldAccessor(contextProp).getPropertyValue("_factory");
+ return (DeserializerFactoryConfig) new DirectFieldAccessor(factoryProp).getPropertyValue("_factoryConfig");
+ }
+
+ @Test
+ public void testCompleteSetup() {
+ NopAnnotationIntrospector annotationIntrospector = NopAnnotationIntrospector.instance;
+ ObjectMapper objectMapper = new ObjectMapper();
+
+ assertTrue(this.factory.isSingleton());
+ assertEquals(ObjectMapper.class, this.factory.getObjectType());
+
+ Map<Class<?>, JsonDeserializer<?>> deserializers = new HashMap<Class<?>, JsonDeserializer<?>>();
+ deserializers.put(Date.class, new DateDeserializer());
+
+ this.factory.setObjectMapper(objectMapper);
+ this.factory.setSerializers(new ClassSerializer());
+ this.factory.setDeserializersByType(deserializers);
+ this.factory.setAnnotationIntrospector(annotationIntrospector);
+
+ this.factory.setFeaturesToEnable(
+ SerializationFeature.FAIL_ON_EMPTY_BEANS,
+ DeserializationFeature.UNWRAP_ROOT_VALUE,
+ JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER,
+ JsonGenerator.Feature.WRITE_NUMBERS_AS_STRINGS);
+
+ this.factory.setFeaturesToDisable(
+ MapperFeature.AUTO_DETECT_GETTERS,
+ MapperFeature.AUTO_DETECT_FIELDS,
+ JsonParser.Feature.AUTO_CLOSE_SOURCE,
+ JsonGenerator.Feature.QUOTE_FIELD_NAMES);
+
+ assertFalse(getSerializerFactoryConfig(objectMapper).hasSerializers());
+ assertFalse(getDeserializerFactoryConfig(objectMapper).hasDeserializers());
+
+ this.factory.afterPropertiesSet();
+
+ assertTrue(objectMapper == this.factory.getObject());
+
+ assertTrue(getSerializerFactoryConfig(objectMapper).hasSerializers());
+ assertTrue(getDeserializerFactoryConfig(objectMapper).hasDeserializers());
+
+ assertTrue(annotationIntrospector == objectMapper.getSerializationConfig().getAnnotationIntrospector());
+ assertTrue(annotationIntrospector == objectMapper.getDeserializationConfig().getAnnotationIntrospector());
+
+ assertTrue(objectMapper.getSerializationConfig().isEnabled(SerializationFeature.FAIL_ON_EMPTY_BEANS));
+ assertTrue(objectMapper.getDeserializationConfig().isEnabled(DeserializationFeature.UNWRAP_ROOT_VALUE));
+ assertTrue(objectMapper.getJsonFactory().isEnabled(JsonParser.Feature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER));
+ assertTrue(objectMapper.getJsonFactory().isEnabled(JsonGenerator.Feature.WRITE_NUMBERS_AS_STRINGS));
+
+ assertFalse(objectMapper.getSerializationConfig().isEnabled(MapperFeature.AUTO_DETECT_GETTERS));
+ assertFalse(objectMapper.getDeserializationConfig().isEnabled(MapperFeature.AUTO_DETECT_FIELDS));
+ assertFalse(objectMapper.getJsonFactory().isEnabled(JsonParser.Feature.AUTO_CLOSE_SOURCE));
+ assertFalse(objectMapper.getJsonFactory().isEnabled(JsonGenerator.Feature.QUOTE_FIELD_NAMES));
+ }
+}
diff --git a/spring-web/src/test/java/org/springframework/http/converter/json/JacksonObjectMapperFactoryBeanTests.java b/spring-web/src/test/java/org/springframework/http/converter/json/JacksonObjectMapperFactoryBeanTests.java
index 3a22dfddf1ab..a66f4e2fbb8d 100644
--- a/spring-web/src/test/java/org/springframework/http/converter/json/JacksonObjectMapperFactoryBeanTests.java
+++ b/spring-web/src/test/java/org/springframework/http/converter/json/JacksonObjectMapperFactoryBeanTests.java
@@ -32,8 +32,6 @@
import org.codehaus.jackson.map.introspect.NopAnnotationIntrospector;
import org.junit.Before;
import org.junit.Test;
-import org.springframework.beans.FatalBeanException;
-import org.springframework.http.converter.json.JacksonObjectMapperFactoryBean;
/**
* @author <a href="mailto:[email protected]">Dmitry Katsubo</a>
diff --git a/spring-web/src/test/java/org/springframework/http/converter/json/MappingJackson2HttpMessageConverterTests.java b/spring-web/src/test/java/org/springframework/http/converter/json/MappingJackson2HttpMessageConverterTests.java
index 603b6613abec..3579c5d4644d 100644
--- a/spring-web/src/test/java/org/springframework/http/converter/json/MappingJackson2HttpMessageConverterTests.java
+++ b/spring-web/src/test/java/org/springframework/http/converter/json/MappingJackson2HttpMessageConverterTests.java
@@ -52,7 +52,7 @@ public void readGenerics() throws IOException {
@Override
protected JavaType getJavaType(Type type) {
- if (type instanceof Class && List.class.isAssignableFrom((Class)type)) {
+ if (type instanceof Class && List.class.isAssignableFrom((Class<?>)type)) {
return new ObjectMapper().getTypeFactory().constructCollectionType(ArrayList.class, MyBean.class);
}
else {
diff --git a/spring-web/src/test/java/org/springframework/http/converter/json/MappingJacksonHttpMessageConverterTests.java b/spring-web/src/test/java/org/springframework/http/converter/json/MappingJacksonHttpMessageConverterTests.java
index c3fe567a69cb..5f70a4d6d4b7 100644
--- a/spring-web/src/test/java/org/springframework/http/converter/json/MappingJacksonHttpMessageConverterTests.java
+++ b/spring-web/src/test/java/org/springframework/http/converter/json/MappingJacksonHttpMessageConverterTests.java
@@ -50,7 +50,7 @@ public void readGenerics() throws IOException {
MappingJacksonHttpMessageConverter converter = new MappingJacksonHttpMessageConverter() {
@Override
protected JavaType getJavaType(Type type) {
- if (type instanceof Class && List.class.isAssignableFrom((Class)type)) {
+ if (type instanceof Class && List.class.isAssignableFrom((Class<?>)type)) {
return TypeFactory.collectionType(ArrayList.class, MyBean.class);
}
else {
|
afb690e2337caa1cb6b34ca921ae7d0edb7600b9
|
elasticsearch
|
refactor sub fetch phase to also allow for hits- level execution--
|
a
|
https://github.com/elastic/elasticsearch
|
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/SearchModule.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/SearchModule.java
index ab3d2d2909115..2251e3c64eeca 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/SearchModule.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/SearchModule.java
@@ -28,10 +28,10 @@
import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.facet.FacetModule;
import org.elasticsearch.search.fetch.FetchPhase;
-import org.elasticsearch.search.fetch.explain.ExplainSearchHitPhase;
-import org.elasticsearch.search.fetch.matchedfilters.MatchedFiltersSearchHitPhase;
-import org.elasticsearch.search.fetch.script.ScriptFieldsSearchHitPhase;
-import org.elasticsearch.search.fetch.version.VersionSearchHitPhase;
+import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase;
+import org.elasticsearch.search.fetch.matchedfilters.MatchedFiltersFetchSubPhase;
+import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase;
+import org.elasticsearch.search.fetch.version.VersionFetchSubPhase;
import org.elasticsearch.search.highlight.HighlightPhase;
import org.elasticsearch.search.query.QueryPhase;
@@ -51,10 +51,10 @@ public class SearchModule extends AbstractModule implements SpawnModules {
bind(SearchPhaseController.class).asEagerSingleton();
bind(FetchPhase.class).asEagerSingleton();
- bind(ExplainSearchHitPhase.class).asEagerSingleton();
- bind(ScriptFieldsSearchHitPhase.class).asEagerSingleton();
- bind(VersionSearchHitPhase.class).asEagerSingleton();
- bind(MatchedFiltersSearchHitPhase.class).asEagerSingleton();
+ bind(ExplainFetchSubPhase.class).asEagerSingleton();
+ bind(ScriptFieldsFetchSubPhase.class).asEagerSingleton();
+ bind(VersionFetchSubPhase.class).asEagerSingleton();
+ bind(MatchedFiltersFetchSubPhase.class).asEagerSingleton();
bind(HighlightPhase.class).asEagerSingleton();
bind(SearchServiceTransportAction.class).asEagerSingleton();
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
index 88addc6cc1fbe..effa9bb1b8581 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
@@ -41,10 +41,10 @@
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase;
-import org.elasticsearch.search.fetch.explain.ExplainSearchHitPhase;
-import org.elasticsearch.search.fetch.matchedfilters.MatchedFiltersSearchHitPhase;
-import org.elasticsearch.search.fetch.script.ScriptFieldsSearchHitPhase;
-import org.elasticsearch.search.fetch.version.VersionSearchHitPhase;
+import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase;
+import org.elasticsearch.search.fetch.matchedfilters.MatchedFiltersFetchSubPhase;
+import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase;
+import org.elasticsearch.search.fetch.version.VersionFetchSubPhase;
import org.elasticsearch.search.highlight.HighlightPhase;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
@@ -62,18 +62,18 @@
*/
public class FetchPhase implements SearchPhase {
- private final SearchHitPhase[] hitPhases;
+ private final FetchSubPhase[] fetchSubPhases;
- @Inject public FetchPhase(HighlightPhase highlightPhase, ScriptFieldsSearchHitPhase scriptFieldsPhase,
- MatchedFiltersSearchHitPhase matchFiltersPhase, ExplainSearchHitPhase explainPhase, VersionSearchHitPhase versionPhase) {
- this.hitPhases = new SearchHitPhase[]{scriptFieldsPhase, matchFiltersPhase, explainPhase, highlightPhase, versionPhase};
+ @Inject public FetchPhase(HighlightPhase highlightPhase, ScriptFieldsFetchSubPhase scriptFieldsPhase,
+ MatchedFiltersFetchSubPhase matchFiltersPhase, ExplainFetchSubPhase explainPhase, VersionFetchSubPhase versionPhase) {
+ this.fetchSubPhases = new FetchSubPhase[]{scriptFieldsPhase, matchFiltersPhase, explainPhase, highlightPhase, versionPhase};
}
@Override public Map<String, ? extends SearchParseElement> parseElements() {
ImmutableMap.Builder<String, SearchParseElement> parseElements = ImmutableMap.builder();
parseElements.put("fields", new FieldsParseElement());
- for (SearchHitPhase hitPhase : hitPhases) {
- parseElements.putAll(hitPhase.parseElements());
+ for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
+ parseElements.putAll(fetchSubPhase.parseElements());
}
return parseElements.build();
}
@@ -199,14 +199,21 @@ public void execute(SearchContext context) {
}
}
- for (SearchHitPhase hitPhase : hitPhases) {
- SearchHitPhase.HitContext hitContext = new SearchHitPhase.HitContext();
- if (hitPhase.executionNeeded(context)) {
+ for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
+ FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
+ if (fetchSubPhase.hitExecutionNeeded(context)) {
hitContext.reset(searchHit, subReader, subDoc, doc);
- hitPhase.execute(context, hitContext);
+ fetchSubPhase.hitExecute(context, hitContext);
}
}
}
+
+ for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
+ if (fetchSubPhase.hitsExecutionNeeded(context)) {
+ fetchSubPhase.hitsExecute(context, hits);
+ }
+ }
+
context.fetchResult().hits(new InternalSearchHits(hits, context.queryResult().topDocs().totalHits, context.queryResult().topDocs().getMaxScore()));
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/SearchHitPhase.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java
similarity index 85%
rename from modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/SearchHitPhase.java
rename to modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java
index 7ce7ef9c13ad5..e234901fea855 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/SearchHitPhase.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java
@@ -22,7 +22,6 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.ElasticSearchException;
-import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.SearchContext;
@@ -32,7 +31,7 @@
/**
* @author kimchy (shay.banon)
*/
-public interface SearchHitPhase {
+public interface FetchSubPhase {
public static class HitContext {
private InternalSearchHit hit;
@@ -66,10 +65,14 @@ public Document doc() {
Map<String, ? extends SearchParseElement> parseElements();
- boolean executionNeeded(SearchContext context);
+ boolean hitExecutionNeeded(SearchContext context);
/**
* Executes the hit level phase, with a reader and doc id (note, its a low level reader, and the matching doc).
*/
- void execute(SearchContext context, HitContext hitContext) throws ElasticSearchException;
+ void hitExecute(SearchContext context, HitContext hitContext) throws ElasticSearchException;
+
+ boolean hitsExecutionNeeded(SearchContext context);
+
+ void hitsExecute(SearchContext context, InternalSearchHit[] hits) throws ElasticSearchException;
}
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/explain/ExplainSearchHitPhase.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/explain/ExplainFetchSubPhase.java
similarity index 75%
rename from modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/explain/ExplainSearchHitPhase.java
rename to modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/explain/ExplainFetchSubPhase.java
index 0c51cd4c09396..396e10c7354d1 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/explain/ExplainSearchHitPhase.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/explain/ExplainFetchSubPhase.java
@@ -23,7 +23,8 @@
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
-import org.elasticsearch.search.fetch.SearchHitPhase;
+import org.elasticsearch.search.fetch.FetchSubPhase;
+import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -32,17 +33,24 @@
/**
* @author kimchy (shay.banon)
*/
-public class ExplainSearchHitPhase implements SearchHitPhase {
+public class ExplainFetchSubPhase implements FetchSubPhase {
@Override public Map<String, ? extends SearchParseElement> parseElements() {
return ImmutableMap.of("explain", new ExplainParseElement());
}
- @Override public boolean executionNeeded(SearchContext context) {
+ @Override public boolean hitsExecutionNeeded(SearchContext context) {
+ return false;
+ }
+
+ @Override public void hitsExecute(SearchContext context, InternalSearchHit[] hits) throws ElasticSearchException {
+ }
+
+ @Override public boolean hitExecutionNeeded(SearchContext context) {
return context.explain();
}
- @Override public void execute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
+ @Override public void hitExecute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
try {
// we use the top level doc id, since we work with the top level searcher
hitContext.hit().explanation(context.searcher().explain(context.query(), hitContext.hit().docId()));
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/matchedfilters/MatchedFiltersSearchHitPhase.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/matchedfilters/MatchedFiltersFetchSubPhase.java
similarity index 82%
rename from modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/matchedfilters/MatchedFiltersSearchHitPhase.java
rename to modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/matchedfilters/MatchedFiltersFetchSubPhase.java
index 79322cde8e4ca..452337809c47f 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/matchedfilters/MatchedFiltersSearchHitPhase.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/matchedfilters/MatchedFiltersFetchSubPhase.java
@@ -19,7 +19,6 @@
package org.elasticsearch.search.fetch.matchedfilters;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.elasticsearch.ElasticSearchException;
@@ -27,9 +26,8 @@
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.lucene.docset.DocSet;
import org.elasticsearch.common.lucene.docset.DocSets;
-import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.search.SearchParseElement;
-import org.elasticsearch.search.fetch.SearchHitPhase;
+import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.SearchContext;
@@ -40,17 +38,24 @@
/**
* @author kimchy (shay.banon)
*/
-public class MatchedFiltersSearchHitPhase implements SearchHitPhase {
+public class MatchedFiltersFetchSubPhase implements FetchSubPhase {
@Override public Map<String, ? extends SearchParseElement> parseElements() {
return ImmutableMap.of();
}
- @Override public boolean executionNeeded(SearchContext context) {
+ @Override public boolean hitsExecutionNeeded(SearchContext context) {
+ return false;
+ }
+
+ @Override public void hitsExecute(SearchContext context, InternalSearchHit[] hits) throws ElasticSearchException {
+ }
+
+ @Override public boolean hitExecutionNeeded(SearchContext context) {
return !context.parsedQuery().namedFilters().isEmpty();
}
- @Override public void execute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
+ @Override public void hitExecute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
List<String> matchedFilters = Lists.newArrayListWithCapacity(2);
for (Map.Entry<String, Filter> entry : context.parsedQuery().namedFilters().entrySet()) {
String name = entry.getKey();
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsSearchHitPhase.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsFetchSubPhase.java
similarity index 81%
rename from modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsSearchHitPhase.java
rename to modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsFetchSubPhase.java
index 97945828a5809..7ca09ed4fd977 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsSearchHitPhase.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/script/ScriptFieldsFetchSubPhase.java
@@ -24,7 +24,8 @@
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
-import org.elasticsearch.search.fetch.SearchHitPhase;
+import org.elasticsearch.search.fetch.FetchSubPhase;
+import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.SearchContext;
@@ -35,9 +36,9 @@
/**
* @author kimchy (shay.banon)
*/
-public class ScriptFieldsSearchHitPhase implements SearchHitPhase {
+public class ScriptFieldsFetchSubPhase implements FetchSubPhase {
- @Inject public ScriptFieldsSearchHitPhase() {
+ @Inject public ScriptFieldsFetchSubPhase() {
}
@Override public Map<String, ? extends SearchParseElement> parseElements() {
@@ -47,11 +48,18 @@ public class ScriptFieldsSearchHitPhase implements SearchHitPhase {
return parseElements.build();
}
- @Override public boolean executionNeeded(SearchContext context) {
+ @Override public boolean hitsExecutionNeeded(SearchContext context) {
+ return false;
+ }
+
+ @Override public void hitsExecute(SearchContext context, InternalSearchHit[] hits) throws ElasticSearchException {
+ }
+
+ @Override public boolean hitExecutionNeeded(SearchContext context) {
return context.hasScriptFields();
}
- @Override public void execute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
+ @Override public void hitExecute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
for (ScriptFieldsContext.ScriptField scriptField : context.scriptFields().fields()) {
scriptField.script().setNextReader(hitContext.reader());
scriptField.script().setNextDocId(hitContext.docId());
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/version/VersionSearchHitPhase.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/version/VersionFetchSubPhase.java
similarity index 76%
rename from modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/version/VersionSearchHitPhase.java
rename to modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/version/VersionFetchSubPhase.java
index c44f1f02ce111..bbb42ca4d33f0 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/version/VersionSearchHitPhase.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/fetch/version/VersionFetchSubPhase.java
@@ -24,7 +24,8 @@
import org.elasticsearch.common.lucene.uid.UidField;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.search.SearchParseElement;
-import org.elasticsearch.search.fetch.SearchHitPhase;
+import org.elasticsearch.search.fetch.FetchSubPhase;
+import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.SearchContext;
import java.util.Map;
@@ -32,17 +33,24 @@
/**
* @author kimchy (shay.banon)
*/
-public class VersionSearchHitPhase implements SearchHitPhase {
+public class VersionFetchSubPhase implements FetchSubPhase {
@Override public Map<String, ? extends SearchParseElement> parseElements() {
return ImmutableMap.of("version", new VersionParseElement());
}
- @Override public boolean executionNeeded(SearchContext context) {
+ @Override public boolean hitsExecutionNeeded(SearchContext context) {
+ return false;
+ }
+
+ @Override public void hitsExecute(SearchContext context, InternalSearchHit[] hits) throws ElasticSearchException {
+ }
+
+ @Override public boolean hitExecutionNeeded(SearchContext context) {
return context.version();
}
- @Override public void execute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
+ @Override public void hitExecute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
// it might make sense to cache the TermDocs on a shared fetch context and just skip here)
// it is going to mean we work on the high level multi reader and not the lower level reader as is
// the case below...
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java
index 34a7d357aa6d6..b4c54da436779 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java
@@ -40,9 +40,10 @@
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
-import org.elasticsearch.search.fetch.SearchHitPhase;
+import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.highlight.vectorhighlight.SourceScoreOrderFragmentsBuilder;
import org.elasticsearch.search.highlight.vectorhighlight.SourceSimpleFragmentsBuilder;
+import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
@@ -58,7 +59,7 @@
/**
* @author kimchy (shay.banon)
*/
-public class HighlightPhase implements SearchHitPhase {
+public class HighlightPhase implements FetchSubPhase {
public static class Encoders {
public static Encoder DEFAULT = new DefaultEncoder();
@@ -69,11 +70,18 @@ public static class Encoders {
return ImmutableMap.of("highlight", new HighlighterParseElement());
}
- @Override public boolean executionNeeded(SearchContext context) {
+ @Override public boolean hitsExecutionNeeded(SearchContext context) {
+ return false;
+ }
+
+ @Override public void hitsExecute(SearchContext context, InternalSearchHit[] hits) throws ElasticSearchException {
+ }
+
+ @Override public boolean hitExecutionNeeded(SearchContext context) {
return context.highlight() != null;
}
- @Override public void execute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
+ @Override public void hitExecute(SearchContext context, HitContext hitContext) throws ElasticSearchException {
try {
DocumentMapper documentMapper = context.mapperService().documentMapper(hitContext.hit().type());
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java
index aae46cbf440df..33b763d964918 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java
@@ -69,7 +69,7 @@ public class InternalSearchHit implements SearchHit {
private Map<String, SearchHitField> fields = ImmutableMap.of();
- private Map<String, HighlightField> highlightFields = ImmutableMap.of();
+ private Map<String, HighlightField> highlightFields = null;
private Object[] sortValues = EMPTY_SORT_VALUES;
@@ -230,7 +230,14 @@ public void fields(Map<String, SearchHitField> fields) {
this.fields = fields;
}
+ public Map<String, HighlightField> internalHighlightFields() {
+ return highlightFields;
+ }
+
@Override public Map<String, HighlightField> highlightFields() {
+ if (highlightFields == null) {
+ return ImmutableMap.of();
+ }
return this.highlightFields;
}
|
1e971c6aa481b2794369ce66173ebeb77c08350a
|
intellij-community
|
intellilang avoid costly service lookups--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/lang-api/src/com/intellij/lang/injection/InjectedLanguageManager.java b/platform/lang-api/src/com/intellij/lang/injection/InjectedLanguageManager.java
index bf742113bc9f7..9ba141ac9117a 100644
--- a/platform/lang-api/src/com/intellij/lang/injection/InjectedLanguageManager.java
+++ b/platform/lang-api/src/com/intellij/lang/injection/InjectedLanguageManager.java
@@ -26,6 +26,7 @@
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.project.Project;
+import com.intellij.openapi.util.NotNullLazyKey;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
@@ -41,8 +42,10 @@ public abstract class InjectedLanguageManager implements ProjectComponent {
@Deprecated
public static final ExtensionPointName<MultiHostInjector> MULTIHOST_INJECTOR_EP_NAME = MultiHostInjector.MULTIHOST_INJECTOR_EP_NAME;
+ private final static NotNullLazyKey<InjectedLanguageManager, Project> INSTANCE_CACHE = ServiceManager.createLazyKey(InjectedLanguageManager.class);
+
public static InjectedLanguageManager getInstance(Project project) {
- return ServiceManager.getService(project, InjectedLanguageManager.class);
+ return INSTANCE_CACHE.getValue(project);
}
@Nullable
|
cc2f077d437139e79052fbafa37a2065f5a1bd1d
|
hadoop
|
HADOOP-6452 Hadoop JSP pages don't work under a- security manager--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@893490 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/src/java/org/apache/hadoop/security/authorize/ConfiguredPolicy.java b/src/java/org/apache/hadoop/security/authorize/ConfiguredPolicy.java
index 6b90829aa854b..da06b08ec3d38 100644
--- a/src/java/org/apache/hadoop/security/authorize/ConfiguredPolicy.java
+++ b/src/java/org/apache/hadoop/security/authorize/ConfiguredPolicy.java
@@ -22,6 +22,8 @@
import java.security.Policy;
import java.security.Principal;
import java.security.ProtectionDomain;
+import java.security.CodeSource;
+import java.security.Permissions;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -77,6 +79,19 @@ public boolean implies(ProtectionDomain domain, Permission permission) {
return super.implies(domain, permission);
}
+ /**
+ * {@inheritDoc}
+ * @return a writable permission collection
+ */
+ @Override
+ public PermissionCollection getPermissions(CodeSource codesource) {
+ return new Permissions();
+ }
+
+ /**
+ * {@inheritDoc}
+ * @return a writable permission collection
+ */
@Override
public PermissionCollection getPermissions(ProtectionDomain domain) {
PermissionCollection permissionCollection = super.getPermissions(domain);
@@ -153,4 +168,14 @@ private void addPermission(Map<Principal, Set<Permission>> permissions,
LOG.debug("Policy - Adding " + permission + " to " + principal);
}
}
+
+ /**
+ * For debugging: identify ourselves and the policyproviders
+ *
+ * @return a string representation of the object.
+ */
+ @Override
+ public String toString() {
+ return "Hadoop ConfiguredPolicy " + super.toString() + " Policy provider "+ policyProvider;
+ }
}
diff --git a/src/test/core/org/apache/hadoop/security/authorize/TestConfiguredPolicy.java b/src/test/core/org/apache/hadoop/security/authorize/TestConfiguredPolicy.java
index 203946cabd8f3..898631eb77aab 100644
--- a/src/test/core/org/apache/hadoop/security/authorize/TestConfiguredPolicy.java
+++ b/src/test/core/org/apache/hadoop/security/authorize/TestConfiguredPolicy.java
@@ -17,7 +17,13 @@
*/
package org.apache.hadoop.security.authorize;
-import java.security.Permission;
+import java.security.CodeSource;
+import java.security.CodeSigner;
+import java.security.PermissionCollection;
+import java.security.ProtectionDomain;
+import java.net.URL;
+import java.net.NetPermission;
+import java.net.MalformedURLException;
import javax.security.auth.Subject;
@@ -36,7 +42,7 @@ public class TestConfiguredPolicy extends TestCase {
private static final String KEY_1 = "test.policy.1";
private static final String KEY_2 = "test.policy.2";
-
+
public static class Protocol1 {
int i;
}
@@ -55,11 +61,7 @@ public Service[] getServices() {
}
public void testConfiguredPolicy() throws Exception {
- Configuration conf = new Configuration();
- conf.set(KEY_1, AccessControlList.WILDCARD_ACL_VALUE);
- conf.set(KEY_2, USER1 + " " + GROUPS1[0]);
-
- ConfiguredPolicy policy = new ConfiguredPolicy(conf, new TestPolicyProvider());
+ ConfiguredPolicy policy = createConfiguredPolicy();
SecurityUtil.setPolicy(policy);
Subject user1 =
@@ -79,4 +81,60 @@ public void testConfiguredPolicy() throws Exception {
}
assertTrue(failed);
}
+
+ /**
+ * Create a configured policy with some keys
+ * @return a new configured policy
+ */
+ private ConfiguredPolicy createConfiguredPolicy() {
+ Configuration conf = new Configuration();
+ conf.set(KEY_1, AccessControlList.WILDCARD_ACL_VALUE);
+ conf.set(KEY_2, USER1 + " " + GROUPS1[0]);
+
+ return new ConfiguredPolicy(conf, new TestPolicyProvider());
+ }
+
+ /**
+ * Create a test code source against a test URL
+ * @return a new code source
+ * @throws MalformedURLException
+ */
+ private CodeSource createCodeSource() throws MalformedURLException {
+ return new CodeSource(new URL("http://hadoop.apache.org"),
+ (CodeSigner[]) null);
+ }
+
+ /**
+ * Assert that a permission collection can have new permissions added
+ * @param permissions the collection to check
+ */
+ private void assertWritable(PermissionCollection permissions) {
+ assertFalse(permissions.isReadOnly());
+ NetPermission netPermission = new NetPermission("something");
+ permissions.add(netPermission);
+ }
+
+ /**
+ * test that the {@link PermissionCollection} returned by
+ * {@link ConfiguredPolicy#getPermissions(CodeSource)} is writeable
+ * @throws Throwable on any failure
+ */
+ public void testPolicyWritable() throws Throwable {
+ ConfiguredPolicy policy = createConfiguredPolicy();
+ CodeSource source = createCodeSource();
+ PermissionCollection permissions = policy.getPermissions(source);
+ assertWritable(permissions);
+ }
+
+ /**
+ * test that the {@link PermissionCollection} returned by
+ * {@link ConfiguredPolicy#getPermissions(CodeSource)} is writeable
+ * @throws Throwable on any failure
+ */
+ public void testProtectionDomainPolicyWritable() throws Throwable {
+ ConfiguredPolicy policy = createConfiguredPolicy();
+ CodeSource source = createCodeSource();
+ PermissionCollection permissions = policy.getPermissions(new ProtectionDomain(source, null));
+ assertWritable(permissions);
+ }
}
|
3c24b6a24d66ac927b94ce808ee0e395cc008e0b
|
hbase
|
HBASE-10194 [Usability]: Instructions in- CompactionTool no longer accurate because of namespaces--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1552229 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
index f5f67c509cff..62204b354aa5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
@@ -456,10 +456,10 @@ private void printUsage(final String message) {
System.err.println();
System.err.println("Examples:");
System.err.println(" To compact the full 'TestTable' using MapReduce:");
- System.err.println(" $ bin/hbase " + this.getClass().getName() + " -mapred hdfs:///hbase/TestTable");
+ System.err.println(" $ bin/hbase " + this.getClass().getName() + " -mapred hdfs:///hbase/data/default/TestTable");
System.err.println();
System.err.println(" To compact column family 'x' of the table 'TestTable' region 'abc':");
- System.err.println(" $ bin/hbase " + this.getClass().getName() + " hdfs:///hbase/TestTable/abc/x");
+ System.err.println(" $ bin/hbase " + this.getClass().getName() + " hdfs:///hbase/data/default/TestTable/abc/x");
}
public static void main(String[] args) throws Exception {
|
8b57c2d95e39d7c08e42bb5b074969f076a321d8
|
intellij-community
|
[vcs-log] make BekLinearGraph testable--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/vcs-log/graph/src/com/intellij/vcs/log/graph/impl/facade/BekBaseController.java b/platform/vcs-log/graph/src/com/intellij/vcs/log/graph/impl/facade/BekBaseController.java
index 573d1f7dffc92..0e941ce5a1838 100644
--- a/platform/vcs-log/graph/src/com/intellij/vcs/log/graph/impl/facade/BekBaseController.java
+++ b/platform/vcs-log/graph/src/com/intellij/vcs/log/graph/impl/facade/BekBaseController.java
@@ -25,6 +25,7 @@
import com.intellij.vcs.log.graph.api.permanent.PermanentGraphInfo;
import com.intellij.vcs.log.graph.impl.facade.bek.BekChecker;
import com.intellij.vcs.log.graph.impl.facade.bek.BekIntMap;
+import com.intellij.vcs.log.graph.impl.permanent.PermanentLinearGraphImpl;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
@@ -39,9 +40,9 @@ public class BekBaseController extends CascadeController {
public BekBaseController(@NotNull PermanentGraphInfo permanentGraphInfo, @NotNull BekIntMap bekIntMap) {
super(null, permanentGraphInfo);
myBekIntMap = bekIntMap;
- myBekGraph = new BekLinearGraph();
+ myBekGraph = new BekLinearGraph(myBekIntMap, myPermanentGraphInfo.getPermanentLinearGraph());
- assert BekChecker.checkLinearGraph(myBekGraph); // todo drop later
+ BekChecker.checkLinearGraph(myBekGraph);
}
@NotNull
@@ -85,16 +86,18 @@ public LinearGraph getCompiledGraph() {
return myBekGraph;
}
- private class BekLinearGraph implements LinearGraph {
- @NotNull private final LinearGraph myPermanentGraph;
+ public static class BekLinearGraph implements LinearGraph {
+ @NotNull private final LinearGraph myLinearGraph;
+ @NotNull private final BekIntMap myBekIntMap;
- private BekLinearGraph() {
- myPermanentGraph = myPermanentGraphInfo.getPermanentLinearGraph();
+ public BekLinearGraph(@NotNull BekIntMap bekIntMap, @NotNull LinearGraph linearGraph) {
+ myLinearGraph = linearGraph;
+ myBekIntMap = bekIntMap;
}
@Override
public int nodesCount() {
- return myPermanentGraph.nodesCount();
+ return myLinearGraph.nodesCount();
}
@Nullable
@@ -107,7 +110,7 @@ private Integer getNodeIndex(@Nullable Integer nodeId) {
@NotNull
@Override
public List<GraphEdge> getAdjacentEdges(int nodeIndex, @NotNull EdgeFilter filter) {
- return map(myPermanentGraph.getAdjacentEdges(myBekIntMap.getUsualIndex(nodeIndex), filter), new Function<GraphEdge, GraphEdge>() {
+ return map(myLinearGraph.getAdjacentEdges(myBekIntMap.getUsualIndex(nodeIndex), filter), new Function<GraphEdge, GraphEdge>() {
@Override
public GraphEdge fun(GraphEdge edge) {
return new GraphEdge(getNodeIndex(edge.getUpNodeIndex()), getNodeIndex(edge.getDownNodeIndex()), edge.getTargetId(),
diff --git a/platform/vcs-log/graph/src/com/intellij/vcs/log/graph/impl/facade/bek/BekChecker.java b/platform/vcs-log/graph/src/com/intellij/vcs/log/graph/impl/facade/bek/BekChecker.java
index b1dcc6a9bdd90..6e6cd7e0316ab 100644
--- a/platform/vcs-log/graph/src/com/intellij/vcs/log/graph/impl/facade/bek/BekChecker.java
+++ b/platform/vcs-log/graph/src/com/intellij/vcs/log/graph/impl/facade/bek/BekChecker.java
@@ -16,8 +16,10 @@
package com.intellij.vcs.log.graph.impl.facade.bek;
import com.intellij.openapi.diagnostic.Logger;
+import com.intellij.openapi.util.Pair;
import com.intellij.vcs.log.graph.api.LinearGraph;
import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
import static com.intellij.vcs.log.graph.utils.LinearGraphUtils.getDownNodes;
import static com.intellij.vcs.log.graph.utils.LinearGraphUtils.getUpNodes;
@@ -25,17 +27,29 @@
public class BekChecker {
private final static Logger LOG = Logger.getInstance("#com.intellij.vcs.log.graph.impl.facade.bek.BekChecker");
- public static boolean checkLinearGraph(@NotNull LinearGraph linearGraph) {
+ public static void checkLinearGraph(@NotNull LinearGraph linearGraph) {
+ Pair<Integer, Integer> reversedEdge = findReversedEdge(linearGraph);
+ if (reversedEdge != null) {
+ LOG.error("Illegal edge: up node " + reversedEdge.first + ", downNode " + reversedEdge.second);
+ }
+ }
+
+ @Nullable
+ public static Pair<Integer, Integer> findReversedEdge(@NotNull LinearGraph linearGraph) {
for (int i = 0; i < linearGraph.nodesCount(); i++) {
for (int downNode : getDownNodes(linearGraph, i)) {
- if (downNode <= i) LOG.error("Illegal node: " + i + ", with downNode: " + downNode);
+ if (downNode <= i) {
+ return Pair.create(i, downNode);
+ }
}
for (int upNode : getUpNodes(linearGraph, i)) {
- if (upNode >= i) LOG.error("Illegal node: " + i + ", with upNode: " + upNode);
+ if (upNode >= i) {
+ return Pair.create(upNode, i);
+ }
}
}
- return true;
+ return null;
}
}
|
0efa78710b4ff36dcf50457a4ec16090bc96787b
|
elasticsearch
|
Added clear scroll api.--The clear scroll api allows clear all resources associated with a `scroll_id` by deleting the `scroll_id` and its associated SearchContext.--Closes -3657-
|
a
|
https://github.com/elastic/elasticsearch
|
diff --git a/docs/reference/search/request/search-type.asciidoc b/docs/reference/search/request/search-type.asciidoc
index bf3126fea81c7..bb98ea9ad038e 100644
--- a/docs/reference/search/request/search-type.asciidoc
+++ b/docs/reference/search/request/search-type.asciidoc
@@ -127,3 +127,20 @@ returned. The total_hits will be maintained between scroll requests.
Note, scan search type does not support sorting (either on score or a
field) or faceting.
+
+=== Clear scroll api
+
+added[0.90.4]
+
+Besides consuming the scroll search until no hits has been returned a scroll
+search can also be aborted by deleting the `scroll_id`. This can be done via
+the clear scroll api. When the the `scroll_id` has been deleted also all the
+resources to keep the view open will cleaned open. Example usage:
+
+[source,js]
+--------------------------------------------------
+curl -XDELETE 'localhost:9200/_search/scroll/c2NhbjsxOjBLMzdpWEtqU2IyZHlmVURPeFJOZnc7MzowSzM3aVhLalNiMmR5ZlVET3hSTmZ3OzU6MEszN2lYS2pTYjJkeWZVRE94Uk5mdzsyOjBLMzdpWEtqU2IyZHlmVURPeFJOZnc7NDowSzM3aVhLalNiMmR5ZlVET3hSTmZ3Ow=='
+--------------------------------------------------
+
+Multiple scroll ids can be specified in a comma separated manner, if no id is
+specified then all scroll ids will be cleared up.
\ No newline at end of file
diff --git a/src/main/java/org/elasticsearch/action/ActionModule.java b/src/main/java/org/elasticsearch/action/ActionModule.java
index 9452696f12d70..18b4c5967c0b3 100644
--- a/src/main/java/org/elasticsearch/action/ActionModule.java
+++ b/src/main/java/org/elasticsearch/action/ActionModule.java
@@ -253,6 +253,7 @@ protected void configure() {
registerAction(PercolateAction.INSTANCE, TransportPercolateAction.class);
registerAction(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class, TransportShardMultiPercolateAction.class);
registerAction(ExplainAction.INSTANCE, TransportExplainAction.class);
+ registerAction(ClearScrollAction.INSTANCE, TransportClearScrollAction.class);
// register Name -> GenericAction Map that can be injected to instances.
MapBinder<String, GenericAction> actionsBinder
diff --git a/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java b/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java
new file mode 100644
index 0000000000000..917f3eb3e7b55
--- /dev/null
+++ b/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.search;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.client.Client;
+
+/**
+ */
+public class ClearScrollAction extends Action<ClearScrollRequest, ClearScrollResponse, ClearScrollRequestBuilder> {
+
+ public static final ClearScrollAction INSTANCE = new ClearScrollAction();
+ public static final String NAME = "clear_sc";
+
+ private ClearScrollAction() {
+ super(NAME);
+ }
+
+ @Override
+ public ClearScrollResponse newResponse() {
+ return new ClearScrollResponse();
+ }
+
+ @Override
+ public ClearScrollRequestBuilder newRequestBuilder(Client client) {
+ return new ClearScrollRequestBuilder(client);
+ }
+}
diff --git a/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java b/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java
new file mode 100644
index 0000000000000..ad16a5460578a
--- /dev/null
+++ b/src/main/java/org/elasticsearch/action/search/ClearScrollRequest.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.search;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+import static com.google.common.collect.Lists.newArrayList;
+
+/**
+ */
+public class ClearScrollRequest extends ActionRequest {
+
+ private List<String> scrollIds;
+
+ public List<String> getScrollIds() {
+ return scrollIds;
+ }
+
+ public void setScrollIds(List<String> scrollIds) {
+ this.scrollIds = scrollIds;
+ }
+
+ public void addScrollId(String scrollId) {
+ if (scrollIds == null) {
+ scrollIds = newArrayList();
+ }
+ scrollIds.add(scrollId);
+ }
+
+ @Override
+ public ActionRequestValidationException validate() {
+ return null;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ super.readFrom(in);
+ scrollIds = Arrays.asList(in.readStringArray());
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ super.writeTo(out);
+ if (scrollIds == null) {
+ out.writeVInt(0);
+ } else {
+ out.writeStringArray(scrollIds.toArray(new String[scrollIds.size()]));
+ }
+ }
+
+}
diff --git a/src/main/java/org/elasticsearch/action/search/ClearScrollRequestBuilder.java b/src/main/java/org/elasticsearch/action/search/ClearScrollRequestBuilder.java
new file mode 100644
index 0000000000000..e984a4f171271
--- /dev/null
+++ b/src/main/java/org/elasticsearch/action/search/ClearScrollRequestBuilder.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.search;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.internal.InternalClient;
+
+import java.util.List;
+
+/**
+ */
+public class ClearScrollRequestBuilder extends ActionRequestBuilder<ClearScrollRequest, ClearScrollResponse, ClearScrollRequestBuilder> {
+
+ public ClearScrollRequestBuilder(Client client) {
+ super((InternalClient) client, new ClearScrollRequest());
+ }
+
+ public ClearScrollRequestBuilder setScrollIds(List<String> cursorIds) {
+ request.setScrollIds(cursorIds);
+ return this;
+ }
+
+ public ClearScrollRequestBuilder addScrollId(String cursorId) {
+ request.addScrollId(cursorId);
+ return this;
+ }
+
+ @Override
+ protected void doExecute(ActionListener<ClearScrollResponse> listener) {
+ ((Client) client).clearScroll(request, listener);
+ }
+}
diff --git a/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java b/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java
new file mode 100644
index 0000000000000..8dced0fb4da7f
--- /dev/null
+++ b/src/main/java/org/elasticsearch/action/search/ClearScrollResponse.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.search;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+
+/**
+ */
+public class ClearScrollResponse extends ActionResponse {
+
+ private boolean succeeded;
+
+ public ClearScrollResponse(boolean succeeded) {
+ this.succeeded = succeeded;
+ }
+
+ ClearScrollResponse() {
+ }
+
+ public boolean isSucceeded() {
+ return succeeded;
+ }
+
+ @Override
+ public void readFrom(StreamInput in) throws IOException {
+ super.readFrom(in);
+ succeeded = in.readBoolean();
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ super.writeTo(out);
+ out.writeBoolean(succeeded);
+ }
+}
diff --git a/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java b/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java
new file mode 100644
index 0000000000000..396cae028cd86
--- /dev/null
+++ b/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.search;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.TransportAction;
+import org.elasticsearch.cluster.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.search.action.SearchServiceTransportAction;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.elasticsearch.action.search.type.TransportSearchHelper.parseScrollId;
+
+/**
+ */
+public class TransportClearScrollAction extends TransportAction<ClearScrollRequest, ClearScrollResponse> {
+
+ private final ClusterService clusterService;
+ private final SearchServiceTransportAction searchServiceTransportAction;
+
+ @Inject
+ public TransportClearScrollAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, SearchServiceTransportAction searchServiceTransportAction) {
+ super(settings, threadPool);
+ this.clusterService = clusterService;
+ this.searchServiceTransportAction = searchServiceTransportAction;
+ }
+
+ @Override
+ protected void doExecute(ClearScrollRequest request, final ActionListener<ClearScrollResponse> listener) {
+ new Async(request, listener, clusterService.state()).run();
+ }
+
+ private class Async {
+
+ final DiscoveryNodes nodes;
+ final AtomicInteger expectedOps;
+ final ClearScrollRequest request;
+ final List<Tuple<String, Long>[]> contexts = new ArrayList<Tuple<String, Long>[]>();
+ final AtomicReference<Throwable> expHolder;
+ final ActionListener<ClearScrollResponse> listener;
+
+ private Async(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener, ClusterState clusterState) {
+ int expectedOps = 0;
+ this.nodes = clusterState.nodes();
+ if (request.getScrollIds() == null || request.getScrollIds().isEmpty()) {
+ expectedOps = nodes.size();
+ } else {
+ for (String parsedScrollId : request.getScrollIds()) {
+ Tuple<String, Long>[] context = parseScrollId(parsedScrollId).getContext();
+ expectedOps += context.length;
+ this.contexts.add(context);
+ }
+ }
+
+ this.request = request;
+ this.listener = listener;
+ this.expHolder = new AtomicReference<Throwable>();
+ this.expectedOps = new AtomicInteger(expectedOps);
+ }
+
+ public void run() {
+ if (expectedOps.get() == 0) {
+ listener.onResponse(new ClearScrollResponse(true));
+ return;
+ }
+
+ if (contexts.isEmpty()) {
+ for (final DiscoveryNode node : nodes) {
+ searchServiceTransportAction.sendClearAllScrollContexts(node, request, new ActionListener<Boolean>() {
+ @Override
+ public void onResponse(Boolean success) {
+ onFreedContext();
+ }
+
+ @Override
+ public void onFailure(Throwable e) {
+ onFailedFreedContext(e, node);
+ }
+ });
+ }
+ } else {
+ for (Tuple<String, Long>[] context : contexts) {
+ for (Tuple<String, Long> target : context) {
+ final DiscoveryNode node = nodes.get(target.v1());
+ if (node == null) {
+ onFreedContext();
+ continue;
+ }
+
+ searchServiceTransportAction.sendFreeContext(node, target.v2(), request, new ActionListener<Boolean>() {
+ @Override
+ public void onResponse(Boolean success) {
+ onFreedContext();
+ }
+
+ @Override
+ public void onFailure(Throwable e) {
+ onFailedFreedContext(e, node);
+ }
+ });
+ }
+ }
+ }
+ }
+
+ void onFreedContext() {
+ assert expectedOps.get() > 0;
+ if (expectedOps.decrementAndGet() == 0) {
+ boolean succeeded = expHolder.get() == null;
+ listener.onResponse(new ClearScrollResponse(succeeded));
+ }
+ }
+
+ void onFailedFreedContext(Throwable e, DiscoveryNode node) {
+ logger.warn("Clear SC failed on node[{}]", e, node);
+ assert expectedOps.get() > 0;
+ if (expectedOps.decrementAndGet() == 0) {
+ listener.onResponse(new ClearScrollResponse(false));
+ } else {
+ expHolder.set(e);
+ }
+ }
+
+ }
+
+}
diff --git a/src/main/java/org/elasticsearch/client/Client.java b/src/main/java/org/elasticsearch/client/Client.java
index 98d916cc4561f..97b935e1b77d1 100644
--- a/src/main/java/org/elasticsearch/client/Client.java
+++ b/src/main/java/org/elasticsearch/client/Client.java
@@ -540,4 +540,19 @@ public interface Client {
*/
void explain(ExplainRequest request, ActionListener<ExplainResponse> listener);
+ /**
+ * Clears the search contexts associated with specified scroll ids.
+ */
+ ClearScrollRequestBuilder prepareClearScroll();
+
+ /**
+ * Clears the search contexts associated with specified scroll ids.
+ */
+ ActionFuture<ClearScrollResponse> clearScroll(ClearScrollRequest request);
+
+ /**
+ * Clears the search contexts associated with specified scroll ids.
+ */
+ void clearScroll(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener);
+
}
\ No newline at end of file
diff --git a/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/src/main/java/org/elasticsearch/client/support/AbstractClient.java
index d16ada20e3f48..c7bad1d8d289a 100644
--- a/src/main/java/org/elasticsearch/client/support/AbstractClient.java
+++ b/src/main/java/org/elasticsearch/client/support/AbstractClient.java
@@ -366,4 +366,19 @@ public ActionFuture<ExplainResponse> explain(ExplainRequest request) {
public void explain(ExplainRequest request, ActionListener<ExplainResponse> listener) {
execute(ExplainAction.INSTANCE, request, listener);
}
+
+ @Override
+ public void clearScroll(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener) {
+ execute(ClearScrollAction.INSTANCE, request, listener);
+ }
+
+ @Override
+ public ActionFuture<ClearScrollResponse> clearScroll(ClearScrollRequest request) {
+ return execute(ClearScrollAction.INSTANCE, request);
+ }
+
+ @Override
+ public ClearScrollRequestBuilder prepareClearScroll() {
+ return new ClearScrollRequestBuilder(this);
+ }
}
diff --git a/src/main/java/org/elasticsearch/rest/action/RestActionModule.java b/src/main/java/org/elasticsearch/rest/action/RestActionModule.java
index 2bea5451a5052..951ade07c5404 100644
--- a/src/main/java/org/elasticsearch/rest/action/RestActionModule.java
+++ b/src/main/java/org/elasticsearch/rest/action/RestActionModule.java
@@ -86,6 +86,7 @@
import org.elasticsearch.rest.action.mlt.RestMoreLikeThisAction;
import org.elasticsearch.rest.action.percolate.RestMultiPercolateAction;
import org.elasticsearch.rest.action.percolate.RestPercolateAction;
+import org.elasticsearch.rest.action.search.RestClearScrollAction;
import org.elasticsearch.rest.action.search.RestMultiSearchAction;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.rest.action.search.RestSearchScrollAction;
@@ -199,5 +200,6 @@ protected void configure() {
bind(RestIndicesAction.class).asEagerSingleton();
// Fully qualified to prevent interference with rest.action.count.RestCountAction
bind(org.elasticsearch.rest.action.cat.RestCountAction.class).asEagerSingleton();
+ bind(RestClearScrollAction.class).asEagerSingleton();;
}
}
diff --git a/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java b/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java
new file mode 100644
index 0000000000000..1c6979dc42688
--- /dev/null
+++ b/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.action.search;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.search.ClearScrollRequest;
+import org.elasticsearch.action.search.ClearScrollResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentBuilderString;
+import org.elasticsearch.rest.*;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import static org.elasticsearch.rest.RestRequest.Method.DELETE;
+import static org.elasticsearch.rest.RestStatus.OK;
+import static org.elasticsearch.rest.action.support.RestXContentBuilder.restContentBuilder;
+
+/**
+ */
+public class RestClearScrollAction extends BaseRestHandler {
+
+ @Inject
+ public RestClearScrollAction(Settings settings, Client client, RestController controller) {
+ super(settings, client);
+
+ controller.registerHandler(DELETE, "/_search/scroll", this);
+ controller.registerHandler(DELETE, "/_search/scroll/{scroll_id}", this);
+ }
+
+ @Override
+ public void handleRequest(final RestRequest request, final RestChannel channel) {
+ String scrollIds = request.param("scroll_id");
+
+ ClearScrollRequest clearRequest = new ClearScrollRequest();
+ clearRequest.setScrollIds(Arrays.asList(splitScrollIds(scrollIds)));
+ client.clearScroll(clearRequest, new ActionListener<ClearScrollResponse>() {
+ @Override
+ public void onResponse(ClearScrollResponse response) {
+ try {
+ XContentBuilder builder = restContentBuilder(request);
+ builder.startObject();
+ builder.field(Fields.OK, response.isSucceeded());
+ builder.endObject();
+ channel.sendResponse(new XContentRestResponse(request, OK, builder));
+ } catch (Throwable e) {
+ onFailure(e);
+ }
+ }
+
+ @Override
+ public void onFailure(Throwable e) {
+ try {
+ channel.sendResponse(new XContentThrowableRestResponse(request, e));
+ } catch (IOException e1) {
+ logger.error("Failed to send failure response", e1);
+ }
+ }
+ });
+ }
+
+ public static String[] splitScrollIds(String scrollIds) {
+ if (scrollIds == null) {
+ return Strings.EMPTY_ARRAY;
+ }
+ return Strings.splitStringByCommaToArray(scrollIds);
+ }
+
+ static final class Fields {
+
+ static final XContentBuilderString OK = new XContentBuilderString("ok");
+
+ }
+}
diff --git a/src/main/java/org/elasticsearch/search/SearchService.java b/src/main/java/org/elasticsearch/search/SearchService.java
index f02a18332625f..53455a74b8ca4 100644
--- a/src/main/java/org/elasticsearch/search/SearchService.java
+++ b/src/main/java/org/elasticsearch/search/SearchService.java
@@ -509,6 +509,14 @@ private void freeContext(SearchContext context) {
context.release();
}
+ public void freeAllScrollContexts() {
+ for (SearchContext searchContext : activeContexts.values()) {
+ if (searchContext.scroll() != null) {
+ freeContext(searchContext);
+ }
+ }
+ }
+
private void contextProcessing(SearchContext context) {
// disable timeout while executing a search
context.accessed(-1);
diff --git a/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java b/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java
index ecdd0be90a8b9..3ff0ae0c94687 100644
--- a/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java
+++ b/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java
@@ -19,6 +19,8 @@
package org.elasticsearch.search.action;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -81,6 +83,7 @@ public SearchServiceTransportAction(Settings settings, TransportService transpor
this.searchService = searchService;
transportService.registerHandler(SearchFreeContextTransportHandler.ACTION, new SearchFreeContextTransportHandler());
+ transportService.registerHandler(ClearScrollContextsTransportHandler.ACTION, new ClearScrollContextsTransportHandler());
transportService.registerHandler(SearchDfsTransportHandler.ACTION, new SearchDfsTransportHandler());
transportService.registerHandler(SearchQueryTransportHandler.ACTION, new SearchQueryTransportHandler());
transportService.registerHandler(SearchQueryByIdTransportHandler.ACTION, new SearchQueryByIdTransportHandler());
@@ -101,6 +104,64 @@ public void sendFreeContext(DiscoveryNode node, final long contextId, SearchRequ
}
}
+ public void sendFreeContext(DiscoveryNode node, long contextId, ClearScrollRequest request, final ActionListener<Boolean> actionListener) {
+ if (clusterService.state().nodes().localNodeId().equals(node.id())) {
+ searchService.freeContext(contextId);
+ actionListener.onResponse(true);
+ } else {
+ transportService.sendRequest(node, SearchFreeContextTransportHandler.ACTION, new SearchFreeContextRequest(request, contextId), new TransportResponseHandler<TransportResponse>() {
+ @Override
+ public TransportResponse newInstance() {
+ return TransportResponse.Empty.INSTANCE;
+ }
+
+ @Override
+ public void handleResponse(TransportResponse response) {
+ actionListener.onResponse(true);
+ }
+
+ @Override
+ public void handleException(TransportException exp) {
+ actionListener.onFailure(exp);
+ }
+
+ @Override
+ public String executor() {
+ return ThreadPool.Names.SAME;
+ }
+ });
+ }
+ }
+
+ public void sendClearAllScrollContexts(DiscoveryNode node, ClearScrollRequest request, final ActionListener<Boolean> actionListener) {
+ if (clusterService.state().nodes().localNodeId().equals(node.id())) {
+ searchService.freeAllScrollContexts();
+ actionListener.onResponse(true);
+ } else {
+ transportService.sendRequest(node, ClearScrollContextsTransportHandler.ACTION, new ClearScrollContextsRequest(request), new TransportResponseHandler<TransportResponse>() {
+ @Override
+ public TransportResponse newInstance() {
+ return TransportResponse.Empty.INSTANCE;
+ }
+
+ @Override
+ public void handleResponse(TransportResponse response) {
+ actionListener.onResponse(true);
+ }
+
+ @Override
+ public void handleException(TransportException exp) {
+ actionListener.onFailure(exp);
+ }
+
+ @Override
+ public String executor() {
+ return ThreadPool.Names.SAME;
+ }
+ });
+ }
+ }
+
public void sendExecuteDfs(DiscoveryNode node, final ShardSearchRequest request, final SearchServiceListener<DfsSearchResult> listener) {
if (clusterService.state().nodes().localNodeId().equals(node.id())) {
try {
@@ -448,7 +509,7 @@ class SearchFreeContextRequest extends TransportRequest {
SearchFreeContextRequest() {
}
- SearchFreeContextRequest(SearchRequest request, long id) {
+ SearchFreeContextRequest(TransportRequest request, long id) {
super(request);
this.id = id;
}
@@ -493,6 +554,39 @@ public String executor() {
}
}
+ class ClearScrollContextsRequest extends TransportRequest {
+
+ ClearScrollContextsRequest() {
+ }
+
+ ClearScrollContextsRequest(TransportRequest request) {
+ super(request);
+ }
+
+ }
+
+ class ClearScrollContextsTransportHandler extends BaseTransportRequestHandler<ClearScrollContextsRequest> {
+
+ static final String ACTION = "search/clearScrollContexts";
+
+ @Override
+ public ClearScrollContextsRequest newInstance() {
+ return new ClearScrollContextsRequest();
+ }
+
+ @Override
+ public void messageReceived(ClearScrollContextsRequest request, TransportChannel channel) throws Exception {
+ searchService.freeAllScrollContexts();
+ channel.sendResponse(TransportResponse.Empty.INSTANCE);
+ }
+
+ @Override
+ public String executor() {
+ // freeing the context is cheap,
+ // no need for fork it to another thread
+ return ThreadPool.Names.SAME;
+ }
+ }
private class SearchDfsTransportHandler extends BaseTransportRequestHandler<ShardSearchRequest> {
diff --git a/src/test/java/org/elasticsearch/test/integration/search/scroll/SearchScrollTests.java b/src/test/java/org/elasticsearch/test/integration/search/scroll/SearchScrollTests.java
index bffc6077d4560..3f04758b6ca8c 100644
--- a/src/test/java/org/elasticsearch/test/integration/search/scroll/SearchScrollTests.java
+++ b/src/test/java/org/elasticsearch/test/integration/search/scroll/SearchScrollTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.test.integration.search.scroll;
+import org.elasticsearch.action.search.ClearScrollResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.Priority;
@@ -33,7 +34,6 @@
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.*;
-import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
/**
@@ -217,4 +217,179 @@ public void testScrollAndUpdateIndex() throws Exception {
assertThat(client().prepareCount().setQuery(termQuery("message", "update")).execute().actionGet().getCount(), equalTo(500l));
assertThat(client().prepareCount().setQuery(termQuery("message", "update")).execute().actionGet().getCount(), equalTo(500l));
}
+
+ @Test
+ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception {
+ try {
+ client().admin().indices().prepareDelete("test").execute().actionGet();
+ } catch (Exception e) {
+ // ignore
+ }
+ client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 3)).execute().actionGet();
+ client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
+
+ client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
+
+ for (int i = 0; i < 100; i++) {
+ client().prepareIndex("test", "type1", Integer.toString(i)).setSource(jsonBuilder().startObject().field("field", i).endObject()).execute().actionGet();
+ }
+
+ client().admin().indices().prepareRefresh().execute().actionGet();
+
+ SearchResponse searchResponse1 = client().prepareSearch()
+ .setQuery(matchAllQuery())
+ .setSize(35)
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .addSort("field", SortOrder.ASC)
+ .execute().actionGet();
+
+ SearchResponse searchResponse2 = client().prepareSearch()
+ .setQuery(matchAllQuery())
+ .setSize(35)
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .addSort("field", SortOrder.ASC)
+ .execute().actionGet();
+
+ long counter1 = 0;
+ long counter2 = 0;
+
+ assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse1.getHits().hits().length, equalTo(35));
+ for (SearchHit hit : searchResponse1.getHits()) {
+ assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
+ }
+
+ assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse2.getHits().hits().length, equalTo(35));
+ for (SearchHit hit : searchResponse2.getHits()) {
+ assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
+ }
+
+ searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId())
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .execute().actionGet();
+
+ searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId())
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .execute().actionGet();
+
+ assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse1.getHits().hits().length, equalTo(35));
+ for (SearchHit hit : searchResponse1.getHits()) {
+ assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
+ }
+
+ assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse2.getHits().hits().length, equalTo(35));
+ for (SearchHit hit : searchResponse2.getHits()) {
+ assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
+ }
+
+ ClearScrollResponse clearResponse = client().prepareClearScroll()
+ .addScrollId(searchResponse1.getScrollId())
+ .addScrollId(searchResponse2.getScrollId())
+ .execute().actionGet();
+ assertThat(clearResponse.isSucceeded(), equalTo(true));
+
+ searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId())
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .execute().actionGet();
+
+ searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId())
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .execute().actionGet();
+
+ assertThat(searchResponse1.getHits().getTotalHits(), equalTo(0l));
+ assertThat(searchResponse1.getHits().hits().length, equalTo(0));
+
+ assertThat(searchResponse2.getHits().getTotalHits(), equalTo(0l));
+ assertThat(searchResponse2.getHits().hits().length, equalTo(0));
+ }
+
+ @Test
+ public void testSimpleScrollQueryThenFetch_clearAllScrollIds() throws Exception {
+ try {
+ client().admin().indices().prepareDelete("test").execute().actionGet();
+ } catch (Exception e) {
+ // ignore
+ }
+ client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("index.number_of_shards", 3)).execute().actionGet();
+ client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
+
+ client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
+
+ for (int i = 0; i < 100; i++) {
+ client().prepareIndex("test", "type1", Integer.toString(i)).setSource(jsonBuilder().startObject().field("field", i).endObject()).execute().actionGet();
+ }
+
+ client().admin().indices().prepareRefresh().execute().actionGet();
+
+ SearchResponse searchResponse1 = client().prepareSearch()
+ .setQuery(matchAllQuery())
+ .setSize(35)
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .addSort("field", SortOrder.ASC)
+ .execute().actionGet();
+
+ SearchResponse searchResponse2 = client().prepareSearch()
+ .setQuery(matchAllQuery())
+ .setSize(35)
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .addSort("field", SortOrder.ASC)
+ .execute().actionGet();
+
+ long counter1 = 0;
+ long counter2 = 0;
+
+ assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse1.getHits().hits().length, equalTo(35));
+ for (SearchHit hit : searchResponse1.getHits()) {
+ assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
+ }
+
+ assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse2.getHits().hits().length, equalTo(35));
+ for (SearchHit hit : searchResponse2.getHits()) {
+ assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
+ }
+
+ searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId())
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .execute().actionGet();
+
+ searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId())
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .execute().actionGet();
+
+ assertThat(searchResponse1.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse1.getHits().hits().length, equalTo(35));
+ for (SearchHit hit : searchResponse1.getHits()) {
+ assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter1++));
+ }
+
+ assertThat(searchResponse2.getHits().getTotalHits(), equalTo(100l));
+ assertThat(searchResponse2.getHits().hits().length, equalTo(35));
+ for (SearchHit hit : searchResponse2.getHits()) {
+ assertThat(((Number) hit.sortValues()[0]).longValue(), equalTo(counter2++));
+ }
+
+ ClearScrollResponse clearResponse = client().prepareClearScroll()
+ .execute().actionGet();
+ assertThat(clearResponse.isSucceeded(), equalTo(true));
+
+ searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId())
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .execute().actionGet();
+
+ searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId())
+ .setScroll(TimeValue.timeValueMinutes(2))
+ .execute().actionGet();
+
+ assertThat(searchResponse1.getHits().getTotalHits(), equalTo(0l));
+ assertThat(searchResponse1.getHits().hits().length, equalTo(0));
+
+ assertThat(searchResponse2.getHits().getTotalHits(), equalTo(0l));
+ assertThat(searchResponse2.getHits().hits().length, equalTo(0));
+ }
+
}
|
b701a84d372be5cdc2fa1382e7755407700620cf
|
hadoop
|
Merge r1580077 from trunk. YARN-1849. Fixed NPE in- ResourceTrackerService-registerNodeManager for UAM. Contributed by Karthik- Kambatla--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1580078 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ec57c127fb78f..198d002c7ec03 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -524,6 +524,9 @@ Release 2.4.0 - UNRELEASED
YARN-1670. Fixed a bug in log-aggregation that can cause the writer to write
more log-data than the log-length that it records. (Mit Desai via vinodk)
+ YARN-1849. Fixed NPE in ResourceTrackerService#registerNodeManager for UAM
+ (Karthik Kambatla via jianhe )
+
Release 2.3.1 - UNRELEASED
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
index 8a2c53958cba7..1d4032048e468 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
@@ -31,6 +31,7 @@
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.VersionUtil;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
@@ -187,12 +188,51 @@ protected void serviceStop() throws Exception {
super.serviceStop();
}
+ /**
+ * Helper method to handle received ContainerStatus. If this corresponds to
+ * the completion of a master-container of a managed AM,
+ * we call the handler for RMAppAttemptContainerFinishedEvent.
+ */
+ @SuppressWarnings("unchecked")
+ @VisibleForTesting
+ void handleContainerStatus(ContainerStatus containerStatus) {
+ ApplicationAttemptId appAttemptId =
+ containerStatus.getContainerId().getApplicationAttemptId();
+ RMApp rmApp =
+ rmContext.getRMApps().get(appAttemptId.getApplicationId());
+ if (rmApp == null) {
+ LOG.error("Received finished container : "
+ + containerStatus.getContainerId()
+ + "for unknown application " + appAttemptId.getApplicationId()
+ + " Skipping.");
+ return;
+ }
+
+ if (rmApp.getApplicationSubmissionContext().getUnmanagedAM()) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Ignoring container completion status for unmanaged AM"
+ + rmApp.getApplicationId());
+ }
+ return;
+ }
+
+ RMAppAttempt rmAppAttempt = rmApp.getRMAppAttempt(appAttemptId);
+ Container masterContainer = rmAppAttempt.getMasterContainer();
+ if (masterContainer.getId().equals(containerStatus.getContainerId())
+ && containerStatus.getState() == ContainerState.COMPLETE) {
+ // sending master container finished event.
+ RMAppAttemptContainerFinishedEvent evt =
+ new RMAppAttemptContainerFinishedEvent(appAttemptId,
+ containerStatus);
+ rmContext.getDispatcher().getEventHandler().handle(evt);
+ }
+ }
+
@SuppressWarnings("unchecked")
@Override
public RegisterNodeManagerResponse registerNodeManager(
RegisterNodeManagerRequest request) throws YarnException,
IOException {
-
NodeId nodeId = request.getNodeId();
String host = nodeId.getHost();
int cmPort = nodeId.getPort();
@@ -204,29 +244,7 @@ public RegisterNodeManagerResponse registerNodeManager(
LOG.info("received container statuses on node manager register :"
+ request.getContainerStatuses());
for (ContainerStatus containerStatus : request.getContainerStatuses()) {
- ApplicationAttemptId appAttemptId =
- containerStatus.getContainerId().getApplicationAttemptId();
- RMApp rmApp =
- rmContext.getRMApps().get(appAttemptId.getApplicationId());
- if (rmApp != null) {
- RMAppAttempt rmAppAttempt = rmApp.getRMAppAttempt(appAttemptId);
- if (rmAppAttempt != null) {
- if (rmAppAttempt.getMasterContainer().getId()
- .equals(containerStatus.getContainerId())
- && containerStatus.getState() == ContainerState.COMPLETE) {
- // sending master container finished event.
- RMAppAttemptContainerFinishedEvent evt =
- new RMAppAttemptContainerFinishedEvent(appAttemptId,
- containerStatus);
- rmContext.getDispatcher().getEventHandler().handle(evt);
- }
- }
- } else {
- LOG.error("Received finished container :"
- + containerStatus.getContainerId()
- + " for non existing application :"
- + appAttemptId.getApplicationId());
- }
+ handleContainerStatus(containerStatus);
}
}
RegisterNodeManagerResponse response = recordFactory
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
index 697a18099b339..3e90ec8ec1d5a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
@@ -35,9 +35,11 @@
import javax.crypto.SecretKey;
+import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
@@ -629,7 +631,9 @@ public Container getMasterContainer() {
}
}
- private void setMasterContainer(Container container) {
+ @InterfaceAudience.Private
+ @VisibleForTesting
+ public void setMasterContainer(Container container) {
masterContainer = container;
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java
index 303e0fb56e549..2f16b85699d37 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java
@@ -26,8 +26,6 @@
import java.util.HashMap;
import java.util.List;
-import org.junit.Assert;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.metrics2.MetricsSystem;
@@ -45,21 +43,29 @@
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.DrainDispatcher;
+import org.apache.hadoop.yarn.event.Event;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse;
import org.apache.hadoop.yarn.server.api.records.NodeAction;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptImpl;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.util.YarnVersionInfo;
+
import org.junit.After;
+import org.junit.Assert;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
public class TestResourceTrackerService {
@@ -468,26 +474,64 @@ private void checkUnealthyNMCount(MockRM rm, MockNM nm1, boolean health,
ClusterMetrics.getMetrics().getUnhealthyNMs());
}
+ @SuppressWarnings("unchecked")
@Test
- public void testNodeRegistrationWithContainers() throws Exception {
- rm = new MockRM();
- rm.init(new YarnConfiguration());
+ public void testHandleContainerStatusInvalidCompletions() throws Exception {
+ rm = new MockRM(new YarnConfiguration());
rm.start();
- RMApp app = rm.submitApp(1024);
- MockNM nm = rm.registerNode("host1:1234", 8192);
- nm.nodeHeartbeat(true);
+ EventHandler handler =
+ spy(rm.getRMContext().getDispatcher().getEventHandler());
- // Register node with some container statuses
+ // Case 1: Unmanaged AM
+ RMApp app = rm.submitApp(1024, true);
+
+ // Case 1.1: AppAttemptId is null
ContainerStatus status = ContainerStatus.newInstance(
ContainerId.newInstance(ApplicationAttemptId.newInstance(
app.getApplicationId(), 2), 1),
ContainerState.COMPLETE, "Dummy Completed", 0);
+ rm.getResourceTrackerService().handleContainerStatus(status);
+ verify(handler, never()).handle((Event) any());
+
+ // Case 1.2: Master container is null
+ RMAppAttemptImpl currentAttempt =
+ (RMAppAttemptImpl) app.getCurrentAppAttempt();
+ currentAttempt.setMasterContainer(null);
+ status = ContainerStatus.newInstance(
+ ContainerId.newInstance(currentAttempt.getAppAttemptId(), 0),
+ ContainerState.COMPLETE, "Dummy Completed", 0);
+ rm.getResourceTrackerService().handleContainerStatus(status);
+ verify(handler, never()).handle((Event)any());
- // The following shouldn't throw NPE
- nm.registerNode(Collections.singletonList(status));
- assertEquals("Incorrect number of nodes", 1,
- rm.getRMContext().getRMNodes().size());
+ // Case 2: Managed AM
+ app = rm.submitApp(1024);
+
+ // Case 2.1: AppAttemptId is null
+ status = ContainerStatus.newInstance(
+ ContainerId.newInstance(ApplicationAttemptId.newInstance(
+ app.getApplicationId(), 2), 1),
+ ContainerState.COMPLETE, "Dummy Completed", 0);
+ try {
+ rm.getResourceTrackerService().handleContainerStatus(status);
+ } catch (Exception e) {
+ // expected - ignore
+ }
+ verify(handler, never()).handle((Event)any());
+
+ // Case 2.2: Master container is null
+ currentAttempt =
+ (RMAppAttemptImpl) app.getCurrentAppAttempt();
+ currentAttempt.setMasterContainer(null);
+ status = ContainerStatus.newInstance(
+ ContainerId.newInstance(currentAttempt.getAppAttemptId(), 0),
+ ContainerState.COMPLETE, "Dummy Completed", 0);
+ try {
+ rm.getResourceTrackerService().handleContainerStatus(status);
+ } catch (Exception e) {
+ // expected - ignore
+ }
+ verify(handler, never()).handle((Event)any());
}
@Test
|
e8db1654df89d3c60a6620a68fc5b85137e05b42
|
orientdb
|
Renamed everywhere OTreeMap in OMVRBTree as the- new name of the algorithm: OMVRBTree--
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/commons/src/main/java/com/orientechnologies/common/collection/AbstractEntryIterator.java b/commons/src/main/java/com/orientechnologies/common/collection/AbstractEntryIterator.java
index b30c3731226..70fedb1e5ae 100644
--- a/commons/src/main/java/com/orientechnologies/common/collection/AbstractEntryIterator.java
+++ b/commons/src/main/java/com/orientechnologies/common/collection/AbstractEntryIterator.java
@@ -20,15 +20,15 @@
import java.util.NoSuchElementException;
/**
- * Base class for OTreeMap Iterators
+ * Base class for OMVRBTree Iterators
*/
abstract class AbstractEntryIterator<K, V, T> implements Iterator<T> {
- OTreeMap<K, V> tree;
- OTreeMapEntry<K, V> next;
- OTreeMapEntry<K, V> lastReturned;
+ OMVRBTree<K, V> tree;
+ OMVRBTreeEntry<K, V> next;
+ OMVRBTreeEntry<K, V> lastReturned;
int expectedModCount;
- AbstractEntryIterator(OTreeMapEntry<K, V> first) {
+ AbstractEntryIterator(OMVRBTreeEntry<K, V> first) {
if (first == null)
// IN CASE OF ABSTRACTMAP.HASHCODE()
return;
@@ -41,10 +41,10 @@ abstract class AbstractEntryIterator<K, V, T> implements Iterator<T> {
}
public final boolean hasNext() {
- return next != null && (OTreeMap.successor(next) != null || tree.pageIndex < next.getSize() - 1);
+ return next != null && (OMVRBTree.successor(next) != null || tree.pageIndex < next.getSize() - 1);
}
- final OTreeMapEntry<K, V> nextEntry() {
+ final OMVRBTreeEntry<K, V> nextEntry() {
if (next == null)
throw new NoSuchElementException();
@@ -57,20 +57,20 @@ final OTreeMapEntry<K, V> nextEntry() {
throw new ConcurrentModificationException();
tree.pageIndex = 0;
- next = OTreeMap.successor(next);
+ next = OMVRBTree.successor(next);
lastReturned = next;
}
return next;
}
- final OTreeMapEntry<K, V> prevEntry() {
- OTreeMapEntry<K, V> e = next;
+ final OMVRBTreeEntry<K, V> prevEntry() {
+ OMVRBTreeEntry<K, V> e = next;
if (e == null)
throw new NoSuchElementException();
if (tree.modCount != expectedModCount)
throw new ConcurrentModificationException();
- next = OTreeMap.predecessor(e);
+ next = OMVRBTree.predecessor(e);
lastReturned = e;
return e;
}
diff --git a/commons/src/main/java/com/orientechnologies/common/collection/OTreeMap.java b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTree.java
similarity index 80%
rename from commons/src/main/java/com/orientechnologies/common/collection/OTreeMap.java
rename to commons/src/main/java/com/orientechnologies/common/collection/OMVRBTree.java
index d326af052f1..10403e270c1 100644
--- a/commons/src/main/java/com/orientechnologies/common/collection/OTreeMap.java
+++ b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTree.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.orientechnologies.common.collection;
import java.io.IOException;
@@ -18,9 +33,19 @@
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.common.profiler.OProfiler;
+/**
+ * Base abstract class of MVRB-Tree algorithm.
+ *
+ * @author Luca Garulli (l.garulli--at--orientechnologies.com)
+ *
+ * @param <K>
+ * Key type
+ * @param <V>
+ * Value type
+ */
@SuppressWarnings("unchecked")
-public abstract class OTreeMap<K, V> extends AbstractMap<K, V> implements ONavigableMap<K, V>, Cloneable, java.io.Serializable {
- protected OTreeMapEventListener<K, V> listener;
+public abstract class OMVRBTree<K, V> extends AbstractMap<K, V> implements ONavigableMap<K, V>, Cloneable, java.io.Serializable {
+ protected OMVRBTreeEventListener<K, V> listener;
boolean pageItemFound = false;
int pageItemComparator = 0;
protected volatile int pageIndex = -1;
@@ -41,7 +66,7 @@ public abstract class OTreeMap<K, V> extends AbstractMap<K, V> implements ONavig
*/
private final Comparator<? super K> comparator;
- protected transient OTreeMapEntry<K, V> root = null;
+ protected transient OMVRBTreeEntry<K, V> root = null;
/**
* The number of structural modifications to the tree.
@@ -50,14 +75,14 @@ public abstract class OTreeMap<K, V> extends AbstractMap<K, V> implements ONavig
transient boolean runtimeCheckEnabled = false;
- public OTreeMap(final int iSize, final float iLoadFactor) {
+ public OMVRBTree(final int iSize, final float iLoadFactor) {
lastPageSize = iSize;
pageLoadFactor = iLoadFactor;
comparator = null;
init();
}
- public OTreeMap(final OTreeMapEventListener<K, V> iListener) {
+ public OMVRBTree(final OMVRBTreeEventListener<K, V> iListener) {
init();
comparator = null;
listener = iListener;
@@ -70,7 +95,7 @@ public OTreeMap(final OTreeMapEventListener<K, V> iListener) {
* the map that violates this constraint (for example, the user attempts to put a string key into a map whose keys are integers),
* the <tt>put(Object key, Object value)</tt> call will throw a <tt>ClassCastException</tt>.
*/
- public OTreeMap() {
+ public OMVRBTree() {
init();
comparator = null;
}
@@ -86,7 +111,7 @@ public OTreeMap() {
* the comparator that will be used to order this map. If <tt>null</tt>, the {@linkplain Comparable natural ordering} of
* the keys will be used.
*/
- public OTreeMap(final Comparator<? super K> comparator) {
+ public OMVRBTree(final Comparator<? super K> comparator) {
init();
this.comparator = comparator;
}
@@ -104,7 +129,7 @@ public OTreeMap(final Comparator<? super K> comparator) {
* @throws NullPointerException
* if the specified map is null
*/
- public OTreeMap(final Map<? extends K, ? extends V> m) {
+ public OMVRBTree(final Map<? extends K, ? extends V> m) {
init();
comparator = null;
putAll(m);
@@ -119,7 +144,7 @@ public OTreeMap(final Map<? extends K, ? extends V> m) {
* @throws NullPointerException
* if the specified map is null
*/
- public OTreeMap(final SortedMap<K, ? extends V> m) {
+ public OMVRBTree(final SortedMap<K, ? extends V> m) {
init();
comparator = m.comparator();
try {
@@ -132,17 +157,17 @@ public OTreeMap(final SortedMap<K, ? extends V> m) {
/**
* Create a new entry with the first key/value to handle.
*/
- protected abstract OTreeMapEntry<K, V> createEntry(final K key, final V value);
+ protected abstract OMVRBTreeEntry<K, V> createEntry(final K key, final V value);
/**
* Create a new node with the same parent of the node is splitting.
*/
- protected abstract OTreeMapEntry<K, V> createEntry(final OTreeMapEntry<K, V> parent);
+ protected abstract OMVRBTreeEntry<K, V> createEntry(final OMVRBTreeEntry<K, V> parent);
public int getNodes() {
int counter = -1;
- OTreeMapEntry<K, V> entry = getFirstEntry();
+ OMVRBTreeEntry<K, V> entry = getFirstEntry();
while (entry != null) {
entry = successor(entry);
counter++;
@@ -174,7 +199,7 @@ public int size() {
*/
@Override
public boolean containsKey(final Object key) {
- OTreeMapEntry<K, V> entry = getEntry(key);
+ OMVRBTreeEntry<K, V> entry = getEntry(key);
return entry != null;
}
@@ -191,7 +216,7 @@ public boolean containsKey(final Object key) {
*/
@Override
public boolean containsValue(final Object value) {
- for (OTreeMapEntry<K, V> e = getFirstEntry(); e != null; e = successor(e))
+ for (OMVRBTreeEntry<K, V> e = getFirstEntry(); e != null; e = successor(e))
if (valEquals(value, e.getValue()))
return true;
return false;
@@ -220,7 +245,7 @@ public V get(final Object key) {
if (size == 0)
return null;
- OTreeMapEntry<K, V> entry = getEntry(key);
+ OMVRBTreeEntry<K, V> entry = getEntry(key);
return entry == null ? null : entry.getValue();
}
@@ -282,11 +307,11 @@ public void putAll(final Map<? extends K, ? extends V> map) {
* @throws NullPointerException
* if the specified key is null and this map uses natural ordering, or its comparator does not permit null keys
*/
- final OTreeMapEntry<K, V> getEntry(final Object key) {
+ final OMVRBTreeEntry<K, V> getEntry(final Object key) {
return getEntry(key, false);
}
- final OTreeMapEntry<K, V> getEntry(final Object key, final boolean iGetContainer) {
+ final OMVRBTreeEntry<K, V> getEntry(final Object key, final boolean iGetContainer) {
if (key == null)
return null;
@@ -296,7 +321,7 @@ final OTreeMapEntry<K, V> getEntry(final Object key, final boolean iGetContainer
if (comparator != null)
return getEntryUsingComparator(key, iGetContainer);
- OTreeMapEntry<K, V> p = getBestEntryPoint(key);
+ OMVRBTreeEntry<K, V> p = getBestEntryPoint(key);
// System.out.println("Best entry point for key " + key + " is: "+p);
@@ -305,9 +330,9 @@ final OTreeMapEntry<K, V> getEntry(final Object key, final boolean iGetContainer
if (p == null)
return null;
- OTreeMapEntry<K, V> lastNode = p;
- OTreeMapEntry<K, V> prevNode = null;
- OTreeMapEntry<K, V> tmpNode;
+ OMVRBTreeEntry<K, V> lastNode = p;
+ OMVRBTreeEntry<K, V> prevNode = null;
+ OMVRBTreeEntry<K, V> tmpNode;
int beginKey = -1;
int steps = -1;
final Comparable<? super K> k = (Comparable<? super K>) key;
@@ -375,7 +400,7 @@ final OTreeMapEntry<K, V> getEntry(final Object key, final boolean iGetContainer
} finally {
checkTreeStructure(p);
- OProfiler.getInstance().updateStat("[OTreeMap.getEntry] Steps of search", steps);
+ OProfiler.getInstance().updateStat("[OMVRBTree.getEntry] Steps of search", steps);
}
return null;
@@ -384,15 +409,15 @@ final OTreeMapEntry<K, V> getEntry(final Object key, final boolean iGetContainer
/**
* Basic implementation that returns the root node.
*/
- protected OTreeMapEntry<K, V> getBestEntryPoint(final Object key) {
+ protected OMVRBTreeEntry<K, V> getBestEntryPoint(final Object key) {
return root;
}
- public OTreeMapEventListener<K, V> getListener() {
+ public OMVRBTreeEventListener<K, V> getListener() {
return listener;
}
- public void setListener(final OTreeMapEventListener<K, V> listener) {
+ public void setListener(final OMVRBTreeEventListener<K, V> listener) {
this.listener = listener;
}
@@ -402,12 +427,12 @@ public void setListener(final OTreeMapEventListener<K, V> listener) {
*
* @param iGetContainer
*/
- final OTreeMapEntry<K, V> getEntryUsingComparator(final Object key, final boolean iGetContainer) {
+ final OMVRBTreeEntry<K, V> getEntryUsingComparator(final Object key, final boolean iGetContainer) {
K k = (K) key;
Comparator<? super K> cpr = comparator;
if (cpr != null) {
- OTreeMapEntry<K, V> p = root;
- OTreeMapEntry<K, V> lastNode = null;
+ OMVRBTreeEntry<K, V> p = root;
+ OMVRBTreeEntry<K, V> lastNode = null;
while (p != null) {
lastNode = p;
@@ -451,8 +476,8 @@ else if (beginKey > 0) {
* the specified key; if no such entry exists (i.e., the greatest key in the Tree is less than the specified key), returns
* <tt>null</tt>.
*/
- final OTreeMapEntry<K, V> getCeilingEntry(final K key) {
- OTreeMapEntry<K, V> p = root;
+ final OMVRBTreeEntry<K, V> getCeilingEntry(final K key) {
+ OMVRBTreeEntry<K, V> p = root;
while (p != null) {
int cmp = compare(key, p.getKey());
if (cmp < 0) {
@@ -464,7 +489,7 @@ final OTreeMapEntry<K, V> getCeilingEntry(final K key) {
if (p.getRight() != null) {
p = p.getRight();
} else {
- OTreeMapEntry<K, V> parent = p.getParent();
+ OMVRBTreeEntry<K, V> parent = p.getParent();
Entry<K, V> ch = p;
while (parent != null && ch == parent.getRight()) {
ch = parent;
@@ -482,8 +507,8 @@ final OTreeMapEntry<K, V> getCeilingEntry(final K key) {
* Gets the entry corresponding to the specified key; if no such entry exists, returns the entry for the greatest key less than
* the specified key; if no such entry exists, returns <tt>null</tt>.
*/
- final OTreeMapEntry<K, V> getFloorEntry(final K key) {
- OTreeMapEntry<K, V> p = root;
+ final OMVRBTreeEntry<K, V> getFloorEntry(final K key) {
+ OMVRBTreeEntry<K, V> p = root;
while (p != null) {
int cmp = compare(key, p.getKey());
if (cmp > 0) {
@@ -495,7 +520,7 @@ final OTreeMapEntry<K, V> getFloorEntry(final K key) {
if (p.getLeft() != null) {
p = p.getLeft();
} else {
- OTreeMapEntry<K, V> parent = p.getParent();
+ OMVRBTreeEntry<K, V> parent = p.getParent();
Entry<K, V> ch = p;
while (parent != null && ch == parent.getLeft()) {
ch = parent;
@@ -514,8 +539,8 @@ final OTreeMapEntry<K, V> getFloorEntry(final K key) {
* Gets the entry for the least key greater than the specified key; if no such entry exists, returns the entry for the least key
* greater than the specified key; if no such entry exists returns <tt>null</tt>.
*/
- final OTreeMapEntry<K, V> getHigherEntry(final K key) {
- OTreeMapEntry<K, V> p = root;
+ final OMVRBTreeEntry<K, V> getHigherEntry(final K key) {
+ OMVRBTreeEntry<K, V> p = root;
while (p != null) {
int cmp = compare(key, p.getKey());
if (cmp < 0) {
@@ -527,7 +552,7 @@ final OTreeMapEntry<K, V> getHigherEntry(final K key) {
if (p.getRight() != null) {
p = p.getRight();
} else {
- OTreeMapEntry<K, V> parent = p.getParent();
+ OMVRBTreeEntry<K, V> parent = p.getParent();
Entry<K, V> ch = p;
while (parent != null && ch == parent.getRight()) {
ch = parent;
@@ -544,8 +569,8 @@ final OTreeMapEntry<K, V> getHigherEntry(final K key) {
* Returns the entry for the greatest key less than the specified key; if no such entry exists (i.e., the least key in the Tree is
* greater than the specified key), returns <tt>null</tt>.
*/
- final OTreeMapEntry<K, V> getLowerEntry(final K key) {
- OTreeMapEntry<K, V> p = root;
+ final OMVRBTreeEntry<K, V> getLowerEntry(final K key) {
+ OMVRBTreeEntry<K, V> p = root;
while (p != null) {
int cmp = compare(key, p.getKey());
if (cmp > 0) {
@@ -557,7 +582,7 @@ final OTreeMapEntry<K, V> getLowerEntry(final K key) {
if (p.getLeft() != null) {
p = p.getLeft();
} else {
- OTreeMapEntry<K, V> parent = p.getParent();
+ OMVRBTreeEntry<K, V> parent = p.getParent();
Entry<K, V> ch = p;
while (parent != null && ch == parent.getLeft()) {
ch = parent;
@@ -588,7 +613,7 @@ final OTreeMapEntry<K, V> getLowerEntry(final K key) {
*/
@Override
public V put(final K key, final V value) {
- OTreeMapEntry<K, V> parentNode = null;
+ OMVRBTreeEntry<K, V> parentNode = null;
try {
if (root == null) {
@@ -625,7 +650,7 @@ public V put(final K key, final V value) {
parentNode.insert(pageIndex, key, value);
} else {
// CREATE NEW NODE AND COPY HALF OF VALUES FROM THE ORIGIN TO THE NEW ONE IN ORDER TO GET VALUES BALANCED
- final OTreeMapEntry<K, V> newNode = createEntry(parentNode);
+ final OMVRBTreeEntry<K, V> newNode = createEntry(parentNode);
// System.out.println("Created new entry: " + newEntry+ ", insert the key as index="+pageIndex);
@@ -636,7 +661,7 @@ public V put(final K key, final V value) {
// INSERT IN THE NEW NODE
newNode.insert(pageIndex - parentNode.getPageSplitItems(), key, value);
- final OTreeMapEntry<K, V> prevNode = parentNode.getRight();
+ final OMVRBTreeEntry<K, V> prevNode = parentNode.getRight();
// REPLACE THE RIGHT ONE WITH THE NEW NODE
parentNode.setRight(newNode);
@@ -667,7 +692,7 @@ public V put(final K key, final V value) {
}
/**
- * Removes the mapping for this key from this OTreeMap if present.
+ * Removes the mapping for this key from this OMVRBTree if present.
*
* @param key
* key for which mapping should be removed
@@ -680,7 +705,7 @@ public V put(final K key, final V value) {
*/
@Override
public V remove(final Object key) {
- OTreeMapEntry<K, V> p = getEntry(key);
+ OMVRBTreeEntry<K, V> p = getEntry(key);
if (p == null)
return null;
@@ -700,15 +725,15 @@ public void clear() {
}
/**
- * Returns a shallow copy of this <tt>OTreeMap</tt> instance. (The keys and values themselves are not cloned.)
+ * Returns a shallow copy of this <tt>OMVRBTree</tt> instance. (The keys and values themselves are not cloned.)
*
* @return a shallow copy of this map
*/
@Override
public Object clone() {
- OTreeMap<K, V> clone = null;
+ OMVRBTree<K, V> clone = null;
try {
- clone = (OTreeMap<K, V>) super.clone();
+ clone = (OMVRBTree<K, V>) super.clone();
} catch (CloneNotSupportedException e) {
throw new InternalError();
}
@@ -757,7 +782,7 @@ public Map.Entry<K, V> lastEntry() {
* @since 1.6
*/
public Entry<K, V> pollFirstEntry() {
- OTreeMapEntry<K, V> p = getFirstEntry();
+ OMVRBTreeEntry<K, V> p = getFirstEntry();
Map.Entry<K, V> result = exportEntry(p);
if (p != null)
deleteEntry(p);
@@ -768,7 +793,7 @@ public Entry<K, V> pollFirstEntry() {
* @since 1.6
*/
public Entry<K, V> pollLastEntry() {
- OTreeMapEntry<K, V> p = getLastEntry();
+ OMVRBTreeEntry<K, V> p = getLastEntry();
Map.Entry<K, V> result = exportEntry(p);
if (p != null)
deleteEntry(p);
@@ -1026,17 +1051,17 @@ public Iterator<V> iterator() {
@Override
public int size() {
- return OTreeMap.this.size();
+ return OMVRBTree.this.size();
}
@Override
public boolean contains(Object o) {
- return OTreeMap.this.containsValue(o);
+ return OMVRBTree.this.containsValue(o);
}
@Override
public boolean remove(Object o) {
- for (OTreeMapEntry<K, V> e = getFirstEntry(); e != null; e = successor(e)) {
+ for (OMVRBTreeEntry<K, V> e = getFirstEntry(); e != null; e = successor(e)) {
if (valEquals(e.getValue(), o)) {
deleteEntry(e);
return true;
@@ -1047,7 +1072,7 @@ public boolean remove(Object o) {
@Override
public void clear() {
- OTreeMap.this.clear();
+ OMVRBTree.this.clear();
}
}
@@ -1061,7 +1086,7 @@ public Iterator<Map.Entry<K, V>> iterator() {
public boolean contains(Object o) {
if (!(o instanceof Map.Entry))
return false;
- OTreeMapEntry<K, V> entry = (OTreeMapEntry<K, V>) o;
+ OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o;
V value = entry.getValue();
V p = get(entry.getKey());
return p != null && valEquals(p, value);
@@ -1071,9 +1096,9 @@ public boolean contains(Object o) {
public boolean remove(Object o) {
if (!(o instanceof Map.Entry))
return false;
- OTreeMapEntry<K, V> entry = (OTreeMapEntry<K, V>) o;
+ OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o;
V value = entry.getValue();
- OTreeMapEntry<K, V> p = getEntry(entry.getKey());
+ OMVRBTreeEntry<K, V> p = getEntry(entry.getKey());
if (p != null && valEquals(p.getValue(), value)) {
deleteEntry(p);
return true;
@@ -1083,12 +1108,12 @@ public boolean remove(Object o) {
@Override
public int size() {
- return OTreeMap.this.size();
+ return OMVRBTree.this.size();
}
@Override
public void clear() {
- OTreeMap.this.clear();
+ OMVRBTree.this.clear();
}
}
@@ -1116,17 +1141,17 @@ static final class KeySet<E> extends AbstractSet<E> implements ONavigableSet<E>
@Override
public Iterator<E> iterator() {
- if (m instanceof OTreeMap)
- return ((OTreeMap<E, Object>) m).keyIterator();
+ if (m instanceof OMVRBTree)
+ return ((OMVRBTree<E, Object>) m).keyIterator();
else
- return (((OTreeMap.NavigableSubMap) m).keyIterator());
+ return (((OMVRBTree.NavigableSubMap) m).keyIterator());
}
public Iterator<E> descendingIterator() {
- if (m instanceof OTreeMap)
- return ((OTreeMap<E, Object>) m).descendingKeyIterator();
+ if (m instanceof OMVRBTree)
+ return ((OMVRBTree<E, Object>) m).descendingKeyIterator();
else
- return (((OTreeMap.NavigableSubMap) m).descendingKeyIterator());
+ return (((OMVRBTree.NavigableSubMap) m).descendingKeyIterator());
}
@Override
@@ -1195,15 +1220,15 @@ public boolean remove(Object o) {
}
public ONavigableSet<E> subSet(E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) {
- return new OTreeSetMemory<E>(m.subMap(fromElement, fromInclusive, toElement, toInclusive));
+ return new OMVRBTreeSetMemory<E>(m.subMap(fromElement, fromInclusive, toElement, toInclusive));
}
public ONavigableSet<E> headSet(E toElement, boolean inclusive) {
- return new OTreeSetMemory<E>(m.headMap(toElement, inclusive));
+ return new OMVRBTreeSetMemory<E>(m.headMap(toElement, inclusive));
}
public ONavigableSet<E> tailSet(E fromElement, boolean inclusive) {
- return new OTreeSetMemory<E>(m.tailMap(fromElement, inclusive));
+ return new OMVRBTreeSetMemory<E>(m.tailMap(fromElement, inclusive));
}
public SortedSet<E> subSet(E fromElement, E toElement) {
@@ -1219,12 +1244,12 @@ public SortedSet<E> tailSet(E fromElement) {
}
public ONavigableSet<E> descendingSet() {
- return new OTreeSetMemory<E>(m.descendingMap());
+ return new OMVRBTreeSetMemory<E>(m.descendingMap());
}
}
final class EntryIterator extends AbstractEntryIterator<K, V, Map.Entry<K, V>> {
- EntryIterator(OTreeMapEntry<K, V> first) {
+ EntryIterator(OMVRBTreeEntry<K, V> first) {
super(first);
}
@@ -1234,7 +1259,7 @@ public Map.Entry<K, V> next() {
}
final class ValueIterator extends AbstractEntryIterator<K, V, V> {
- ValueIterator(OTreeMapEntry<K, V> first) {
+ ValueIterator(OMVRBTreeEntry<K, V> first) {
super(first);
}
@@ -1244,7 +1269,7 @@ public V next() {
}
final class KeyIterator extends AbstractEntryIterator<K, V, K> {
- KeyIterator(OTreeMapEntry<K, V> first) {
+ KeyIterator(OMVRBTreeEntry<K, V> first) {
super(first);
}
@@ -1254,7 +1279,7 @@ public K next() {
}
final class DescendingKeyIterator extends AbstractEntryIterator<K, V, K> {
- DescendingKeyIterator(OTreeMapEntry<K, V> first) {
+ DescendingKeyIterator(OMVRBTreeEntry<K, V> first) {
super(first);
}
@@ -1266,7 +1291,7 @@ public K next() {
// Little utilities
/**
- * Compares two keys using the correct comparison method for this OTreeMap.
+ * Compares two keys using the correct comparison method for this OMVRBTree.
*/
final int compare(Object k1, Object k2) {
return comparator == null ? ((Comparable<? super K>) k1).compareTo((K) k2) : comparator.compare((K) k1, (K) k2);
@@ -1282,14 +1307,14 @@ final static boolean valEquals(Object o1, Object o2) {
/**
* Return SimpleImmutableEntry for entry, or null if null
*/
- static <K, V> Map.Entry<K, V> exportEntry(OTreeMapEntry<K, V> e) {
+ static <K, V> Map.Entry<K, V> exportEntry(OMVRBTreeEntry<K, V> e) {
return e == null ? null : new OSimpleImmutableEntry<K, V>(e);
}
/**
* Return key for entry, or null if null
*/
- static <K, V> K keyOrNull(OTreeMapEntry<K, V> e) {
+ static <K, V> K keyOrNull(OMVRBTreeEntry<K, V> e) {
return e == null ? null : e.getKey();
}
@@ -1299,7 +1324,7 @@ static <K, V> K keyOrNull(OTreeMapEntry<K, V> e) {
* @throws NoSuchElementException
* if the Entry is null
*/
- static <K> K key(OTreeMapEntry<K, ?> e) {
+ static <K> K key(OMVRBTreeEntry<K, ?> e) {
if (e == null)
throw new NoSuchElementException();
return e.getKey();
@@ -1315,7 +1340,7 @@ static abstract class NavigableSubMap<K, V> extends AbstractMap<K, V> implements
/**
* The backing map.
*/
- final OTreeMap<K, V> m;
+ final OMVRBTree<K, V> m;
/**
* Endpoints are represented as triples (fromStart, lo, loInclusive) and (toEnd, hi, hiInclusive). If fromStart is true, then
@@ -1326,7 +1351,7 @@ static abstract class NavigableSubMap<K, V> extends AbstractMap<K, V> implements
final boolean fromStart, toEnd;
final boolean loInclusive, hiInclusive;
- NavigableSubMap(OTreeMap<K, V> m, boolean fromStart, K lo, boolean loInclusive, boolean toEnd, K hi, boolean hiInclusive) {
+ NavigableSubMap(OMVRBTree<K, V> m, boolean fromStart, K lo, boolean loInclusive, boolean toEnd, K hi, boolean hiInclusive) {
if (!fromStart && !toEnd) {
if (m.compare(lo, hi) > 0)
throw new IllegalArgumentException("fromKey > toKey");
@@ -1383,68 +1408,68 @@ final boolean inRange(Object key, boolean inclusive) {
* descending maps
*/
- final OTreeMapEntry<K, V> absLowest() {
- OTreeMapEntry<K, V> e = (fromStart ? m.getFirstEntry() : (loInclusive ? m.getCeilingEntry(lo) : m.getHigherEntry(lo)));
+ final OMVRBTreeEntry<K, V> absLowest() {
+ OMVRBTreeEntry<K, V> e = (fromStart ? m.getFirstEntry() : (loInclusive ? m.getCeilingEntry(lo) : m.getHigherEntry(lo)));
return (e == null || tooHigh(e.getKey())) ? null : e;
}
- final OTreeMapEntry<K, V> absHighest() {
- OTreeMapEntry<K, V> e = (toEnd ? m.getLastEntry() : (hiInclusive ? m.getFloorEntry(hi) : m.getLowerEntry(hi)));
+ final OMVRBTreeEntry<K, V> absHighest() {
+ OMVRBTreeEntry<K, V> e = (toEnd ? m.getLastEntry() : (hiInclusive ? m.getFloorEntry(hi) : m.getLowerEntry(hi)));
return (e == null || tooLow(e.getKey())) ? null : e;
}
- final OTreeMapEntry<K, V> absCeiling(K key) {
+ final OMVRBTreeEntry<K, V> absCeiling(K key) {
if (tooLow(key))
return absLowest();
- OTreeMapEntry<K, V> e = m.getCeilingEntry(key);
+ OMVRBTreeEntry<K, V> e = m.getCeilingEntry(key);
return (e == null || tooHigh(e.getKey())) ? null : e;
}
- final OTreeMapEntry<K, V> absHigher(K key) {
+ final OMVRBTreeEntry<K, V> absHigher(K key) {
if (tooLow(key))
return absLowest();
- OTreeMapEntry<K, V> e = m.getHigherEntry(key);
+ OMVRBTreeEntry<K, V> e = m.getHigherEntry(key);
return (e == null || tooHigh(e.getKey())) ? null : e;
}
- final OTreeMapEntry<K, V> absFloor(K key) {
+ final OMVRBTreeEntry<K, V> absFloor(K key) {
if (tooHigh(key))
return absHighest();
- OTreeMapEntry<K, V> e = m.getFloorEntry(key);
+ OMVRBTreeEntry<K, V> e = m.getFloorEntry(key);
return (e == null || tooLow(e.getKey())) ? null : e;
}
- final OTreeMapEntry<K, V> absLower(K key) {
+ final OMVRBTreeEntry<K, V> absLower(K key) {
if (tooHigh(key))
return absHighest();
- OTreeMapEntry<K, V> e = m.getLowerEntry(key);
+ OMVRBTreeEntry<K, V> e = m.getLowerEntry(key);
return (e == null || tooLow(e.getKey())) ? null : e;
}
/** Returns the absolute high fence for ascending traversal */
- final OTreeMapEntry<K, V> absHighFence() {
+ final OMVRBTreeEntry<K, V> absHighFence() {
return (toEnd ? null : (hiInclusive ? m.getHigherEntry(hi) : m.getCeilingEntry(hi)));
}
/** Return the absolute low fence for descending traversal */
- final OTreeMapEntry<K, V> absLowFence() {
+ final OMVRBTreeEntry<K, V> absLowFence() {
return (fromStart ? null : (loInclusive ? m.getLowerEntry(lo) : m.getFloorEntry(lo)));
}
// Abstract methods defined in ascending vs descending classes
// These relay to the appropriate absolute versions
- abstract OTreeMapEntry<K, V> subLowest();
+ abstract OMVRBTreeEntry<K, V> subLowest();
- abstract OTreeMapEntry<K, V> subHighest();
+ abstract OMVRBTreeEntry<K, V> subHighest();
- abstract OTreeMapEntry<K, V> subCeiling(K key);
+ abstract OMVRBTreeEntry<K, V> subCeiling(K key);
- abstract OTreeMapEntry<K, V> subHigher(K key);
+ abstract OMVRBTreeEntry<K, V> subHigher(K key);
- abstract OTreeMapEntry<K, V> subFloor(K key);
+ abstract OMVRBTreeEntry<K, V> subFloor(K key);
- abstract OTreeMapEntry<K, V> subLower(K key);
+ abstract OMVRBTreeEntry<K, V> subLower(K key);
/** Returns ascending iterator from the perspective of this submap */
abstract Iterator<K> keyIterator();
@@ -1535,7 +1560,7 @@ public final Map.Entry<K, V> lastEntry() {
}
public final Map.Entry<K, V> pollFirstEntry() {
- OTreeMapEntry<K, V> e = subLowest();
+ OMVRBTreeEntry<K, V> e = subLowest();
Map.Entry<K, V> result = exportEntry(e);
if (e != null)
m.deleteEntry(e);
@@ -1543,7 +1568,7 @@ public final Map.Entry<K, V> pollFirstEntry() {
}
public final Map.Entry<K, V> pollLastEntry() {
- OTreeMapEntry<K, V> e = subHighest();
+ OMVRBTreeEntry<K, V> e = subHighest();
Map.Entry<K, V> result = exportEntry(e);
if (e != null)
m.deleteEntry(e);
@@ -1558,7 +1583,7 @@ public final Map.Entry<K, V> pollLastEntry() {
@SuppressWarnings("rawtypes")
public final ONavigableSet<K> navigableKeySet() {
KeySet<K> nksv = navigableKeySetView;
- return (nksv != null) ? nksv : (navigableKeySetView = new OTreeMap.KeySet(this));
+ return (nksv != null) ? nksv : (navigableKeySetView = new OMVRBTree.KeySet(this));
}
@Override
@@ -1605,15 +1630,15 @@ public int size() {
@Override
public boolean isEmpty() {
- OTreeMapEntry<K, V> n = absLowest();
+ OMVRBTreeEntry<K, V> n = absLowest();
return n == null || tooHigh(n.getKey());
}
@Override
public boolean contains(final Object o) {
- if (!(o instanceof OTreeMapEntry))
+ if (!(o instanceof OMVRBTreeEntry))
return false;
- OTreeMapEntry<K, V> entry = (OTreeMapEntry<K, V>) o;
+ OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o;
K key = entry.getKey();
if (!inRange(key))
return false;
@@ -1623,13 +1648,13 @@ public boolean contains(final Object o) {
@Override
public boolean remove(final Object o) {
- if (!(o instanceof OTreeMapEntry))
+ if (!(o instanceof OMVRBTreeEntry))
return false;
- final OTreeMapEntry<K, V> entry = (OTreeMapEntry<K, V>) o;
+ final OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) o;
K key = entry.getKey();
if (!inRange(key))
return false;
- final OTreeMapEntry<K, V> node = m.getEntry(key);
+ final OMVRBTreeEntry<K, V> node = m.getEntry(key);
if (node != null && valEquals(node.getValue(), entry.getValue())) {
m.deleteEntry(node);
return true;
@@ -1642,12 +1667,12 @@ public boolean remove(final Object o) {
* Iterators for SubMaps
*/
abstract class SubMapIterator<T> implements Iterator<T> {
- OTreeMapEntry<K, V> lastReturned;
- OTreeMapEntry<K, V> next;
+ OMVRBTreeEntry<K, V> lastReturned;
+ OMVRBTreeEntry<K, V> next;
final K fenceKey;
int expectedModCount;
- SubMapIterator(final OTreeMapEntry<K, V> first, final OTreeMapEntry<K, V> fence) {
+ SubMapIterator(final OMVRBTreeEntry<K, V> first, final OMVRBTreeEntry<K, V> fence) {
expectedModCount = m.modCount;
lastReturned = null;
next = first;
@@ -1658,8 +1683,8 @@ public final boolean hasNext() {
return next != null && next.getKey() != fenceKey;
}
- final OTreeMapEntry<K, V> nextEntry() {
- OTreeMapEntry<K, V> e = next;
+ final OMVRBTreeEntry<K, V> nextEntry() {
+ OMVRBTreeEntry<K, V> e = next;
if (e == null || e.getKey() == fenceKey)
throw new NoSuchElementException();
if (m.modCount != expectedModCount)
@@ -1669,8 +1694,8 @@ final OTreeMapEntry<K, V> nextEntry() {
return e;
}
- final OTreeMapEntry<K, V> prevEntry() {
- OTreeMapEntry<K, V> e = next;
+ final OMVRBTreeEntry<K, V> prevEntry() {
+ OMVRBTreeEntry<K, V> e = next;
if (e == null || e.getKey() == fenceKey)
throw new NoSuchElementException();
if (m.modCount != expectedModCount)
@@ -1706,7 +1731,7 @@ final void removeDescending() {
}
final class SubMapEntryIterator extends SubMapIterator<Map.Entry<K, V>> {
- SubMapEntryIterator(final OTreeMapEntry<K, V> first, final OTreeMapEntry<K, V> fence) {
+ SubMapEntryIterator(final OMVRBTreeEntry<K, V> first, final OMVRBTreeEntry<K, V> fence) {
super(first, fence);
}
@@ -1720,7 +1745,7 @@ public void remove() {
}
final class SubMapKeyIterator extends SubMapIterator<K> {
- SubMapKeyIterator(final OTreeMapEntry<K, V> first, final OTreeMapEntry<K, V> fence) {
+ SubMapKeyIterator(final OMVRBTreeEntry<K, V> first, final OMVRBTreeEntry<K, V> fence) {
super(first, fence);
}
@@ -1734,7 +1759,7 @@ public void remove() {
}
final class DescendingSubMapEntryIterator extends SubMapIterator<Map.Entry<K, V>> {
- DescendingSubMapEntryIterator(final OTreeMapEntry<K, V> last, final OTreeMapEntry<K, V> fence) {
+ DescendingSubMapEntryIterator(final OMVRBTreeEntry<K, V> last, final OMVRBTreeEntry<K, V> fence) {
super(last, fence);
}
@@ -1748,7 +1773,7 @@ public void remove() {
}
final class DescendingSubMapKeyIterator extends SubMapIterator<K> {
- DescendingSubMapKeyIterator(final OTreeMapEntry<K, V> last, final OTreeMapEntry<K, V> fence) {
+ DescendingSubMapKeyIterator(final OMVRBTreeEntry<K, V> last, final OMVRBTreeEntry<K, V> fence) {
super(last, fence);
}
@@ -1768,7 +1793,7 @@ public void remove() {
static final class AscendingSubMap<K, V> extends NavigableSubMap<K, V> {
private static final long serialVersionUID = 912986545866124060L;
- AscendingSubMap(final OTreeMap<K, V> m, final boolean fromStart, final K lo, final boolean loInclusive, final boolean toEnd,
+ AscendingSubMap(final OMVRBTree<K, V> m, final boolean fromStart, final K lo, final boolean loInclusive, final boolean toEnd,
K hi, final boolean hiInclusive) {
super(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive);
}
@@ -1827,32 +1852,32 @@ public Set<Map.Entry<K, V>> entrySet() {
}
@Override
- OTreeMapEntry<K, V> subLowest() {
+ OMVRBTreeEntry<K, V> subLowest() {
return absLowest();
}
@Override
- OTreeMapEntry<K, V> subHighest() {
+ OMVRBTreeEntry<K, V> subHighest() {
return absHighest();
}
@Override
- OTreeMapEntry<K, V> subCeiling(final K key) {
+ OMVRBTreeEntry<K, V> subCeiling(final K key) {
return absCeiling(key);
}
@Override
- OTreeMapEntry<K, V> subHigher(final K key) {
+ OMVRBTreeEntry<K, V> subHigher(final K key) {
return absHigher(key);
}
@Override
- OTreeMapEntry<K, V> subFloor(final K key) {
+ OMVRBTreeEntry<K, V> subFloor(final K key) {
return absFloor(key);
}
@Override
- OTreeMapEntry<K, V> subLower(final K key) {
+ OMVRBTreeEntry<K, V> subLower(final K key) {
return absLower(key);
}
}
@@ -1865,7 +1890,7 @@ static final class DescendingSubMap<K, V> extends NavigableSubMap<K, V> {
private final Comparator<? super K> reverseComparator = Collections.reverseOrder(m.comparator);
- DescendingSubMap(final OTreeMap<K, V> m, final boolean fromStart, final K lo, final boolean loInclusive, final boolean toEnd,
+ DescendingSubMap(final OMVRBTree<K, V> m, final boolean fromStart, final K lo, final boolean loInclusive, final boolean toEnd,
final K hi, final boolean hiInclusive) {
super(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive);
}
@@ -1924,32 +1949,32 @@ public Set<Map.Entry<K, V>> entrySet() {
}
@Override
- OTreeMapEntry<K, V> subLowest() {
+ OMVRBTreeEntry<K, V> subLowest() {
return absHighest();
}
@Override
- OTreeMapEntry<K, V> subHighest() {
+ OMVRBTreeEntry<K, V> subHighest() {
return absLowest();
}
@Override
- OTreeMapEntry<K, V> subCeiling(final K key) {
+ OMVRBTreeEntry<K, V> subCeiling(final K key) {
return absFloor(key);
}
@Override
- OTreeMapEntry<K, V> subHigher(final K key) {
+ OMVRBTreeEntry<K, V> subHigher(final K key) {
return absLower(key);
}
@Override
- OTreeMapEntry<K, V> subFloor(final K key) {
+ OMVRBTreeEntry<K, V> subFloor(final K key) {
return absCeiling(key);
}
@Override
- OTreeMapEntry<K, V> subLower(final K key) {
+ OMVRBTreeEntry<K, V> subLower(final K key) {
return absHigher(key);
}
}
@@ -1964,10 +1989,10 @@ OTreeMapEntry<K, V> subLower(final K key) {
*/
/**
- * Returns the first Entry in the OTreeMap (according to the OTreeMap's key-sort function). Returns null if the OTreeMap is empty.
+ * Returns the first Entry in the OMVRBTree (according to the OMVRBTree's key-sort function). Returns null if the OMVRBTree is empty.
*/
- protected OTreeMapEntry<K, V> getFirstEntry() {
- OTreeMapEntry<K, V> p = root;
+ protected OMVRBTreeEntry<K, V> getFirstEntry() {
+ OMVRBTreeEntry<K, V> p = root;
if (p != null) {
if (p.getSize() > 0)
pageIndex = 0;
@@ -1979,10 +2004,10 @@ protected OTreeMapEntry<K, V> getFirstEntry() {
}
/**
- * Returns the last Entry in the OTreeMap (according to the OTreeMap's key-sort function). Returns null if the OTreeMap is empty.
+ * Returns the last Entry in the OMVRBTree (according to the OMVRBTree's key-sort function). Returns null if the OMVRBTree is empty.
*/
- protected final OTreeMapEntry<K, V> getLastEntry() {
- OTreeMapEntry<K, V> p = root;
+ protected final OMVRBTreeEntry<K, V> getLastEntry() {
+ OMVRBTreeEntry<K, V> p = root;
if (p != null)
while (p.getRight() != null)
p = p.getRight();
@@ -1996,11 +2021,11 @@ protected final OTreeMapEntry<K, V> getLastEntry() {
/**
* Returns the successor of the specified Entry, or null if no such.
*/
- public static <K, V> OTreeMapEntry<K, V> successor(final OTreeMapEntry<K, V> t) {
+ public static <K, V> OMVRBTreeEntry<K, V> successor(final OMVRBTreeEntry<K, V> t) {
if (t == null)
return null;
- OTreeMapEntry<K, V> p = null;
+ OMVRBTreeEntry<K, V> p = null;
if (t.getRight() != null) {
p = t.getRight();
@@ -2008,7 +2033,7 @@ public static <K, V> OTreeMapEntry<K, V> successor(final OTreeMapEntry<K, V> t)
p = p.getLeft();
} else {
p = t.getParent();
- OTreeMapEntry<K, V> ch = t;
+ OMVRBTreeEntry<K, V> ch = t;
while (p != null && ch == p.getRight()) {
ch = p;
p = p.getParent();
@@ -2021,16 +2046,16 @@ public static <K, V> OTreeMapEntry<K, V> successor(final OTreeMapEntry<K, V> t)
/**
* Returns the predecessor of the specified Entry, or null if no such.
*/
- public static <K, V> OTreeMapEntry<K, V> predecessor(final OTreeMapEntry<K, V> t) {
+ public static <K, V> OMVRBTreeEntry<K, V> predecessor(final OMVRBTreeEntry<K, V> t) {
if (t == null)
return null;
else if (t.getLeft() != null) {
- OTreeMapEntry<K, V> p = t.getLeft();
+ OMVRBTreeEntry<K, V> p = t.getLeft();
while (p.getRight() != null)
p = p.getRight();
return p;
} else {
- OTreeMapEntry<K, V> p = t.getParent();
+ OMVRBTreeEntry<K, V> p = t.getParent();
Entry<K, V> ch = t;
while (p != null && ch == p.getLeft()) {
ch = p;
@@ -2048,31 +2073,31 @@ else if (t.getLeft() != null) {
* checks in the main algorithms.
*/
- private static <K, V> boolean colorOf(final OTreeMapEntry<K, V> p) {
+ private static <K, V> boolean colorOf(final OMVRBTreeEntry<K, V> p) {
return (p == null ? BLACK : p.getColor());
}
- private static <K, V> OTreeMapEntry<K, V> parentOf(final OTreeMapEntry<K, V> p) {
+ private static <K, V> OMVRBTreeEntry<K, V> parentOf(final OMVRBTreeEntry<K, V> p) {
return (p == null ? null : p.getParent());
}
- private static <K, V> void setColor(final OTreeMapEntry<K, V> p, final boolean c) {
+ private static <K, V> void setColor(final OMVRBTreeEntry<K, V> p, final boolean c) {
if (p != null)
p.setColor(c);
}
- private static <K, V> OTreeMapEntry<K, V> leftOf(final OTreeMapEntry<K, V> p) {
+ private static <K, V> OMVRBTreeEntry<K, V> leftOf(final OMVRBTreeEntry<K, V> p) {
return (p == null) ? null : p.getLeft();
}
- private static <K, V> OTreeMapEntry<K, V> rightOf(final OTreeMapEntry<K, V> p) {
+ private static <K, V> OMVRBTreeEntry<K, V> rightOf(final OMVRBTreeEntry<K, V> p) {
return (p == null) ? null : p.getRight();
}
/** From CLR */
- private void rotateLeft(final OTreeMapEntry<K, V> p) {
+ private void rotateLeft(final OMVRBTreeEntry<K, V> p) {
if (p != null) {
- OTreeMapEntry<K, V> r = p.getRight();
+ OMVRBTreeEntry<K, V> r = p.getRight();
p.setRight(r.getLeft());
if (r.getLeft() != null)
r.getLeft().setParent(p);
@@ -2088,14 +2113,14 @@ else if (p.getParent().getLeft() == p)
}
}
- protected void setRoot(final OTreeMapEntry<K, V> iRoot) {
+ protected void setRoot(final OMVRBTreeEntry<K, V> iRoot) {
root = iRoot;
}
/** From CLR */
- private void rotateRight(final OTreeMapEntry<K, V> p) {
+ private void rotateRight(final OMVRBTreeEntry<K, V> p) {
if (p != null) {
- OTreeMapEntry<K, V> l = p.getLeft();
+ OMVRBTreeEntry<K, V> l = p.getLeft();
p.setLeft(l.getRight());
if (l.getRight() != null)
l.getRight().setParent(p);
@@ -2113,22 +2138,22 @@ else if (p.getParent().getRight() == p)
/** From CLR */
/*
- * private void fixAfterInsertion(OTreeMapEntry<K, V> x) { x.setColor(RED);
+ * private void fixAfterInsertion(OMVRBTreeEntry<K, V> x) { x.setColor(RED);
*
* // if (x != null && x != root && x.getParent() != null && x.getParent().getColor() == RED) { //
* //System.out.println("BEFORE FIX on node: " + x); // printInMemoryStructure(x);
*
- * OTreeMapEntry<K, V> parent; OTreeMapEntry<K, V> grandParent;
+ * OMVRBTreeEntry<K, V> parent; OMVRBTreeEntry<K, V> grandParent;
*
* while (x != null && x != root && x.getParent() != null && x.getParent().getColor() == RED) { parent = parentOf(x); grandParent
* = parentOf(parent);
*
- * if (parent == leftOf(grandParent)) { // MY PARENT IS THE LEFT OF THE GRANDFATHER. GET MY UNCLE final OTreeMapEntry<K, V> uncle
+ * if (parent == leftOf(grandParent)) { // MY PARENT IS THE LEFT OF THE GRANDFATHER. GET MY UNCLE final OMVRBTreeEntry<K, V> uncle
* = rightOf(grandParent); if (colorOf(uncle) == RED) { // SET MY PARENT AND UNCLE TO BLACK setColor(parent, BLACK);
* setColor(uncle, BLACK); // SET GRANDPARENT'S COLOR TO RED setColor(grandParent, RED); // CONTINUE RECURSIVELY WITH MY
* GRANDFATHER x = grandParent; } else { if (x == rightOf(parent)) { // I'M THE RIGHT x = parent; parent = parentOf(x);
* grandParent = parentOf(parent); rotateLeft(x); } setColor(parent, BLACK); setColor(grandParent, RED); rotateRight(grandParent);
- * } } else { // MY PARENT IS THE RIGHT OF THE GRANDFATHER. GET MY UNCLE final OTreeMapEntry<K, V> uncle = leftOf(grandParent); if
+ * } } else { // MY PARENT IS THE RIGHT OF THE GRANDFATHER. GET MY UNCLE final OMVRBTreeEntry<K, V> uncle = leftOf(grandParent); if
* (colorOf(uncle) == RED) { setColor(parent, BLACK); setColor(uncle, BLACK); setColor(grandParent, RED); x = grandParent; } else
* { if (x == leftOf(parent)) { x = parentOf(x); parent = parentOf(x); grandParent = parentOf(parent); rotateRight(x); }
* setColor(parent, BLACK); setColor(grandParent, RED); rotateLeft(grandParent); } } }
@@ -2138,32 +2163,32 @@ else if (p.getParent().getRight() == p)
* root.setColor(BLACK); }
*/
- private OTreeMapEntry<K, V> grandparent(final OTreeMapEntry<K, V> n) {
+ private OMVRBTreeEntry<K, V> grandparent(final OMVRBTreeEntry<K, V> n) {
return parentOf(parentOf(n));
}
- private OTreeMapEntry<K, V> uncle(final OTreeMapEntry<K, V> n) {
+ private OMVRBTreeEntry<K, V> uncle(final OMVRBTreeEntry<K, V> n) {
if (parentOf(n) == leftOf(grandparent(n)))
return rightOf(grandparent(n));
else
return leftOf(grandparent(n));
}
- private void fixAfterInsertion(final OTreeMapEntry<K, V> n) {
+ private void fixAfterInsertion(final OMVRBTreeEntry<K, V> n) {
if (parentOf(n) == null)
setColor(n, BLACK);
else
insert_case2(n);
}
- private void insert_case2(final OTreeMapEntry<K, V> n) {
+ private void insert_case2(final OMVRBTreeEntry<K, V> n) {
if (colorOf(parentOf(n)) == BLACK)
return; /* Tree is still valid */
else
insert_case3(n);
}
- private void insert_case3(final OTreeMapEntry<K, V> n) {
+ private void insert_case3(final OMVRBTreeEntry<K, V> n) {
if (uncle(n) != null && colorOf(uncle(n)) == RED) {
setColor(parentOf(n), BLACK);
setColor(uncle(n), BLACK);
@@ -2173,7 +2198,7 @@ private void insert_case3(final OTreeMapEntry<K, V> n) {
insert_case4(n);
}
- private void insert_case4(OTreeMapEntry<K, V> n) {
+ private void insert_case4(OMVRBTreeEntry<K, V> n) {
if (n == rightOf(parentOf(n)) && parentOf(n) == leftOf(grandparent(n))) {
rotateLeft(parentOf(n));
n = leftOf(n);
@@ -2184,7 +2209,7 @@ private void insert_case4(OTreeMapEntry<K, V> n) {
insert_case5(n);
}
- private void insert_case5(final OTreeMapEntry<K, V> n) {
+ private void insert_case5(final OMVRBTreeEntry<K, V> n) {
setColor(parentOf(n), BLACK);
setColor(grandparent(n), RED);
if (n == leftOf(parentOf(n)) && parentOf(n) == leftOf(grandparent(n))) {
@@ -2201,7 +2226,7 @@ private void insert_case5(final OTreeMapEntry<K, V> n) {
* @param iIndex
* -1 = delete the node, otherwise the item inside of it
*/
- void deleteEntry(OTreeMapEntry<K, V> p) {
+ void deleteEntry(OMVRBTreeEntry<K, V> p) {
size--;
if (listener != null)
@@ -2221,13 +2246,13 @@ void deleteEntry(OTreeMapEntry<K, V> p) {
// If strictly internal, copy successor's element to p and then make p
// point to successor.
if (p.getLeft() != null && p.getRight() != null) {
- OTreeMapEntry<K, V> s = successor(p);
+ OMVRBTreeEntry<K, V> s = successor(p);
p.copyFrom(s);
p = s;
} // p has 2 children
// Start fixup at replacement node, if it exists.
- final OTreeMapEntry<K, V> replacement = (p.getLeft() != null ? p.getLeft() : p.getRight());
+ final OMVRBTreeEntry<K, V> replacement = (p.getLeft() != null ? p.getLeft() : p.getRight());
if (replacement != null) {
// Link replacement to parent
@@ -2262,10 +2287,10 @@ else if (p == p.getParent().getRight())
}
/** From CLR */
- private void fixAfterDeletion(OTreeMapEntry<K, V> x) {
+ private void fixAfterDeletion(OMVRBTreeEntry<K, V> x) {
while (x != root && colorOf(x) == BLACK) {
if (x == leftOf(parentOf(x))) {
- OTreeMapEntry<K, V> sib = rightOf(parentOf(x));
+ OMVRBTreeEntry<K, V> sib = rightOf(parentOf(x));
if (colorOf(sib) == RED) {
setColor(sib, BLACK);
@@ -2291,7 +2316,7 @@ private void fixAfterDeletion(OTreeMapEntry<K, V> x) {
x = root;
}
} else { // symmetric
- OTreeMapEntry<K, V> sib = leftOf(parentOf(x));
+ OMVRBTreeEntry<K, V> sib = leftOf(parentOf(x));
if (colorOf(sib) == RED) {
setColor(sib, BLACK);
@@ -2325,11 +2350,11 @@ private void fixAfterDeletion(OTreeMapEntry<K, V> x) {
private static final long serialVersionUID = 919286545866124006L;
/**
- * Save the state of the <tt>OTreeMap</tt> instance to a stream (i.e., serialize it).
+ * Save the state of the <tt>OMVRBTree</tt> instance to a stream (i.e., serialize it).
*
- * @serialData The <i>size</i> of the OTreeMap (the number of key-value mappings) is emitted (int), followed by the key (Object)
- * and value (Object) for each key-value mapping represented by the OTreeMap. The key-value mappings are emitted in
- * key-order (as determined by the OTreeMap's Comparator, or by the keys' natural ordering if the OTreeMap has no
+ * @serialData The <i>size</i> of the OMVRBTree (the number of key-value mappings) is emitted (int), followed by the key (Object)
+ * and value (Object) for each key-value mapping represented by the OMVRBTree. The key-value mappings are emitted in
+ * key-order (as determined by the OMVRBTree's Comparator, or by the keys' natural ordering if the OMVRBTree has no
* Comparator).
*/
private void writeObject(final java.io.ObjectOutputStream s) throws java.io.IOException {
@@ -2348,7 +2373,7 @@ private void writeObject(final java.io.ObjectOutputStream s) throws java.io.IOEx
}
/**
- * Reconstitute the <tt>OTreeMap</tt> instance from a stream (i.e., deserialize it).
+ * Reconstitute the <tt>OMVRBTree</tt> instance from a stream (i.e., deserialize it).
*/
private void readObject(final java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
// Read in the Comparator and any hidden stuff
@@ -2382,7 +2407,7 @@ void addAllForOTreeSet(SortedSet<? extends K> set, V defaultVal) {
* stream of alternating serialized keys and values. (it == null, defaultVal == null). 4) A stream of serialized keys. (it ==
* null, defaultVal != null).
*
- * It is assumed that the comparator of the OTreeMap is already set prior to calling this method.
+ * It is assumed that the comparator of the OMVRBTree is already set prior to calling this method.
*
* @param size
* the number of keys (or key-value pairs) to be read from the iterator or stream
@@ -2407,7 +2432,7 @@ private void buildFromSorted(final int size, final Iterator<?> it, final java.io
/**
* Recursive "helper method" that does the real work of the previous method. Identically named parameters have identical
- * definitions. Additional parameters are documented below. It is assumed that the comparator and size fields of the OTreeMap are
+ * definitions. Additional parameters are documented below. It is assumed that the comparator and size fields of the OMVRBTree are
* already set prior to calling this method. (It ignores both fields.)
*
* @param level
@@ -2419,7 +2444,7 @@ private void buildFromSorted(final int size, final Iterator<?> it, final java.io
* @param redLevel
* the level at which nodes should be red. Must be equal to computeRedLevel for tree of this size.
*/
- private final OTreeMapEntry<K, V> buildFromSorted(final int level, final int lo, final int hi, final int redLevel,
+ private final OMVRBTreeEntry<K, V> buildFromSorted(final int level, final int lo, final int hi, final int redLevel,
final Iterator<?> it, final java.io.ObjectInputStream str, final V defaultVal) throws java.io.IOException,
ClassNotFoundException {
/*
@@ -2435,7 +2460,7 @@ private final OTreeMapEntry<K, V> buildFromSorted(final int level, final int lo,
final int mid = (lo + hi) / 2;
- OTreeMapEntry<K, V> left = null;
+ OMVRBTreeEntry<K, V> left = null;
if (lo < mid)
left = buildFromSorted(level + 1, lo, mid - 1, redLevel, it, str, defaultVal);
@@ -2444,7 +2469,7 @@ private final OTreeMapEntry<K, V> buildFromSorted(final int level, final int lo,
V value;
if (it != null) {
if (defaultVal == null) {
- OTreeMapEntry<K, V> entry = (OTreeMapEntry<K, V>) it.next();
+ OMVRBTreeEntry<K, V> entry = (OMVRBTreeEntry<K, V>) it.next();
key = entry.getKey();
value = entry.getValue();
} else {
@@ -2456,7 +2481,7 @@ private final OTreeMapEntry<K, V> buildFromSorted(final int level, final int lo,
value = (defaultVal != null ? defaultVal : (V) str.readObject());
}
- final OTreeMapEntry<K, V> middle = createEntry(key, value);
+ final OMVRBTreeEntry<K, V> middle = createEntry(key, value);
// color nodes in non-full bottom most level red
if (level == redLevel)
@@ -2468,7 +2493,7 @@ private final OTreeMapEntry<K, V> buildFromSorted(final int level, final int lo,
}
if (mid < hi) {
- OTreeMapEntry<K, V> right = buildFromSorted(level + 1, mid + 1, hi, redLevel, it, str, defaultVal);
+ OMVRBTreeEntry<K, V> right = buildFromSorted(level + 1, mid + 1, hi, redLevel, it, str, defaultVal);
middle.setRight(right);
right.setParent(middle);
}
@@ -2500,15 +2525,15 @@ public int getPageIndex() {
private void init() {
}
- public OTreeMapEntry<K, V> getRoot() {
+ public OMVRBTreeEntry<K, V> getRoot() {
return root;
}
- protected void printInMemoryStructure(final OTreeMapEntry<K, V> iRootNode) {
+ protected void printInMemoryStructure(final OMVRBTreeEntry<K, V> iRootNode) {
printInMemoryNode("root", iRootNode, 0);
}
- private void printInMemoryNode(final String iLabel, OTreeMapEntry<K, V> iNode, int iLevel) {
+ private void printInMemoryNode(final String iLabel, OMVRBTreeEntry<K, V> iNode, int iLevel) {
if (iNode == null)
return;
@@ -2524,52 +2549,52 @@ private void printInMemoryNode(final String iLabel, OTreeMapEntry<K, V> iNode, i
}
@SuppressWarnings("rawtypes")
- public void checkTreeStructure(final OTreeMapEntry<K, V> iRootNode) {
+ public void checkTreeStructure(final OMVRBTreeEntry<K, V> iRootNode) {
if (!runtimeCheckEnabled || iRootNode == null)
return;
int currPageIndex = pageIndex;
- OTreeMapEntry<K, V> prevNode = null;
+ OMVRBTreeEntry<K, V> prevNode = null;
int i = 0;
- for (OTreeMapEntry<K, V> e = iRootNode.getFirstInMemory(); e != null; e = e.getNextInMemory()) {
+ for (OMVRBTreeEntry<K, V> e = iRootNode.getFirstInMemory(); e != null; e = e.getNextInMemory()) {
if (prevNode != null) {
if (prevNode.getTree() == null)
- OLogManager.instance().error(this, "[OTreeMap.checkTreeStructure] Freed record %d found in memory\n", i);
+ OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] Freed record %d found in memory\n", i);
if (((Comparable) e.getFirstKey()).compareTo(((Comparable) e.getLastKey())) > 0) {
- OLogManager.instance().error(this, "[OTreeMap.checkTreeStructure] begin key is > than last key\n", e.getFirstKey(),
+ OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] begin key is > than last key\n", e.getFirstKey(),
e.getLastKey());
printInMemoryStructure(iRootNode);
}
if (((Comparable) e.getFirstKey()).compareTo(((Comparable) prevNode.getLastKey())) < 0) {
OLogManager.instance().error(this,
- "[OTreeMap.checkTreeStructure] Node %s starts with a key minor than the last key of the previous node %s\n", e,
+ "[OMVRBTree.checkTreeStructure] Node %s starts with a key minor than the last key of the previous node %s\n", e,
prevNode);
printInMemoryStructure(e.getParentInMemory() != null ? e.getParentInMemory() : e);
}
}
if (e.getLeftInMemory() != null && e.getLeftInMemory() == e) {
- OLogManager.instance().error(this, "[OTreeMap.checkTreeStructure] Node %s has left that points to itself!\n", e);
+ OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] Node %s has left that points to itself!\n", e);
printInMemoryStructure(iRootNode);
}
if (e.getRightInMemory() != null && e.getRightInMemory() == e) {
- OLogManager.instance().error(this, "[OTreeMap.checkTreeStructure] Node %s has right that points to itself!\n", e);
+ OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] Node %s has right that points to itself!\n", e);
printInMemoryStructure(iRootNode);
}
if (e.getLeftInMemory() != null && e.getLeftInMemory() == e.getRightInMemory()) {
- OLogManager.instance().error(this, "[OTreeMap.checkTreeStructure] Node %s has left and right equals!\n", e);
+ OLogManager.instance().error(this, "[OMVRBTree.checkTreeStructure] Node %s has left and right equals!\n", e);
printInMemoryStructure(iRootNode);
}
if (e.getParentInMemory() != null && e.getParentInMemory().getRightInMemory() != e
&& e.getParentInMemory().getLeftInMemory() != e) {
OLogManager.instance().error(this,
- "[OTreeMap.checkTreeStructure] Node %s is the children of node %s but the cross-reference is missed!\n", e,
+ "[OMVRBTree.checkTreeStructure] Node %s is the children of node %s but the cross-reference is missed!\n", e,
e.getParentInMemory());
printInMemoryStructure(iRootNode);
}
diff --git a/commons/src/main/java/com/orientechnologies/common/collection/OTreeMapEntry.java b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeEntry.java
similarity index 82%
rename from commons/src/main/java/com/orientechnologies/common/collection/OTreeMapEntry.java
rename to commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeEntry.java
index 35e74c2f810..5948ec13f1a 100644
--- a/commons/src/main/java/com/orientechnologies/common/collection/OTreeMapEntry.java
+++ b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeEntry.java
@@ -18,15 +18,15 @@
import java.util.Map;
@SuppressWarnings("unchecked")
-public abstract class OTreeMapEntry<K, V> implements Map.Entry<K, V> {
- protected OTreeMap<K, V> tree;
+public abstract class OMVRBTreeEntry<K, V> implements Map.Entry<K, V> {
+ protected OMVRBTree<K, V> tree;
protected int size = 1;
protected int pageSize;
protected K[] keys;
protected V[] values;
- protected boolean color = OTreeMap.RED;
+ protected boolean color = OMVRBTree.RED;
private int pageSplitItems;
public static final int BINARY_SEARCH_THRESHOLD = 10;
@@ -35,7 +35,7 @@ public abstract class OTreeMapEntry<K, V> implements Map.Entry<K, V> {
* Constructor called on unmarshalling.
*
*/
- protected OTreeMapEntry(final OTreeMap<K, V> iTree) {
+ protected OMVRBTreeEntry(final OMVRBTree<K, V> iTree) {
this.tree = iTree;
init();
}
@@ -43,7 +43,7 @@ protected OTreeMapEntry(final OTreeMap<K, V> iTree) {
/**
* Make a new cell with given key, value, and parent, and with <tt>null</tt> child links, and BLACK color.
*/
- protected OTreeMapEntry(final OTreeMap<K, V> iTree, final K iKey, final V iValue, final OTreeMapEntry<K, V> iParent) {
+ protected OMVRBTreeEntry(final OMVRBTree<K, V> iTree, final K iKey, final V iValue, final OMVRBTreeEntry<K, V> iParent) {
this.tree = iTree;
setParent(iParent);
this.pageSize = tree.getPageSize();
@@ -61,7 +61,7 @@ protected OTreeMapEntry(final OTreeMap<K, V> iTree, final K iKey, final V iValue
* @param iPosition
* @param iLeft
*/
- protected OTreeMapEntry(final OTreeMapEntry<K, V> iParent, final int iPosition) {
+ protected OMVRBTreeEntry(final OMVRBTreeEntry<K, V> iParent, final int iPosition) {
this.tree = iParent.tree;
this.pageSize = tree.getPageSize();
this.keys = (K[]) new Object[pageSize];
@@ -75,32 +75,32 @@ protected OTreeMapEntry(final OTreeMapEntry<K, V> iParent, final int iPosition)
init();
}
- public abstract void setLeft(OTreeMapEntry<K, V> left);
+ public abstract void setLeft(OMVRBTreeEntry<K, V> left);
- public abstract OTreeMapEntry<K, V> getLeft();
+ public abstract OMVRBTreeEntry<K, V> getLeft();
- public abstract OTreeMapEntry<K, V> setRight(OTreeMapEntry<K, V> right);
+ public abstract OMVRBTreeEntry<K, V> setRight(OMVRBTreeEntry<K, V> right);
- public abstract OTreeMapEntry<K, V> getRight();
+ public abstract OMVRBTreeEntry<K, V> getRight();
- public abstract OTreeMapEntry<K, V> setParent(OTreeMapEntry<K, V> parent);
+ public abstract OMVRBTreeEntry<K, V> setParent(OMVRBTreeEntry<K, V> parent);
- public abstract OTreeMapEntry<K, V> getParent();
+ public abstract OMVRBTreeEntry<K, V> getParent();
- protected abstract OTreeMapEntry<K, V> getLeftInMemory();
+ protected abstract OMVRBTreeEntry<K, V> getLeftInMemory();
- protected abstract OTreeMapEntry<K, V> getParentInMemory();
+ protected abstract OMVRBTreeEntry<K, V> getParentInMemory();
- protected abstract OTreeMapEntry<K, V> getRightInMemory();
+ protected abstract OMVRBTreeEntry<K, V> getRightInMemory();
- protected abstract OTreeMapEntry<K, V> getNextInMemory();
+ protected abstract OMVRBTreeEntry<K, V> getNextInMemory();
/**
* Returns the first Entry only by traversing the memory, or null if no such.
*/
- public OTreeMapEntry<K, V> getFirstInMemory() {
- OTreeMapEntry<K, V> node = this;
- OTreeMapEntry<K, V> prev = this;
+ public OMVRBTreeEntry<K, V> getFirstInMemory() {
+ OMVRBTreeEntry<K, V> node = this;
+ OMVRBTreeEntry<K, V> prev = this;
while (node != null) {
prev = node;
@@ -113,9 +113,9 @@ public OTreeMapEntry<K, V> getFirstInMemory() {
/**
* Returns the previous of the current Entry only by traversing the memory, or null if no such.
*/
- public OTreeMapEntry<K, V> getPreviousInMemory() {
- OTreeMapEntry<K, V> t = this;
- OTreeMapEntry<K, V> p = null;
+ public OMVRBTreeEntry<K, V> getPreviousInMemory() {
+ OMVRBTreeEntry<K, V> t = this;
+ OMVRBTreeEntry<K, V> p = null;
if (t.getLeftInMemory() != null) {
p = t.getLeftInMemory();
@@ -132,13 +132,13 @@ public OTreeMapEntry<K, V> getPreviousInMemory() {
return p;
}
- protected OTreeMap<K, V> getTree() {
+ protected OMVRBTree<K, V> getTree() {
return tree;
}
public int getDepth() {
int level = 0;
- OTreeMapEntry<K, V> entry = this;
+ OMVRBTreeEntry<K, V> entry = this;
while (entry.getParent() != null) {
level++;
entry = entry.getParent();
@@ -382,7 +382,7 @@ public K getFirstKey() {
return getKey(0);
}
- protected void copyFrom(final OTreeMapEntry<K, V> iSource) {
+ protected void copyFrom(final OMVRBTreeEntry<K, V> iSource) {
keys = (K[]) new Object[iSource.keys.length];
for (int i = 0; i < iSource.keys.length; ++i)
keys[i] = iSource.keys[i];
diff --git a/commons/src/main/java/com/orientechnologies/common/collection/OTreeMapEntryMemory.java b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeEntryMemory.java
similarity index 56%
rename from commons/src/main/java/com/orientechnologies/common/collection/OTreeMapEntryMemory.java
rename to commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeEntryMemory.java
index 771cee6e36e..0a31d9f9269 100644
--- a/commons/src/main/java/com/orientechnologies/common/collection/OTreeMapEntryMemory.java
+++ b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeEntryMemory.java
@@ -15,23 +15,23 @@
*/
package com.orientechnologies.common.collection;
-public class OTreeMapEntryMemory<K, V> extends OTreeMapEntry<K, V> {
- protected OTreeMapEntryMemory<K, V> left = null;
- protected OTreeMapEntryMemory<K, V> right = null;
- protected OTreeMapEntryMemory<K, V> parent;
+public class OMVRBTreeEntryMemory<K, V> extends OMVRBTreeEntry<K, V> {
+ protected OMVRBTreeEntryMemory<K, V> left = null;
+ protected OMVRBTreeEntryMemory<K, V> right = null;
+ protected OMVRBTreeEntryMemory<K, V> parent;
/**
* Constructor called on unmarshalling.
*
*/
- protected OTreeMapEntryMemory(final OTreeMap<K, V> iTree) {
+ protected OMVRBTreeEntryMemory(final OMVRBTree<K, V> iTree) {
super(iTree);
}
/**
* Make a new cell with given key, value, and parent, and with <tt>null</tt> child links, and BLACK color.
*/
- protected OTreeMapEntryMemory(final OTreeMap<K, V> iTree, final K iKey, final V iValue, final OTreeMapEntryMemory<K, V> iParent) {
+ protected OMVRBTreeEntryMemory(final OMVRBTree<K, V> iTree, final K iKey, final V iValue, final OMVRBTreeEntryMemory<K, V> iParent) {
super(iTree, iKey, iValue, iParent);
}
@@ -42,26 +42,26 @@ protected OTreeMapEntryMemory(final OTreeMap<K, V> iTree, final K iKey, final V
* @param iPosition
* @param iLeft
*/
- protected OTreeMapEntryMemory(final OTreeMapEntry<K, V> iParent, final int iPosition) {
+ protected OMVRBTreeEntryMemory(final OMVRBTreeEntry<K, V> iParent, final int iPosition) {
super(iParent, iPosition);
setParent(iParent);
}
@Override
- public void setLeft(final OTreeMapEntry<K, V> left) {
- this.left = (OTreeMapEntryMemory<K, V>) left;
+ public void setLeft(final OMVRBTreeEntry<K, V> left) {
+ this.left = (OMVRBTreeEntryMemory<K, V>) left;
if (left != null && left.getParent() != this)
left.setParent(this);
}
@Override
- public OTreeMapEntry<K, V> getLeft() {
+ public OMVRBTreeEntry<K, V> getLeft() {
return left;
}
@Override
- public OTreeMapEntry<K, V> setRight(final OTreeMapEntry<K, V> right) {
- this.right = (OTreeMapEntryMemory<K, V>) right;
+ public OMVRBTreeEntry<K, V> setRight(final OMVRBTreeEntry<K, V> right) {
+ this.right = (OMVRBTreeEntryMemory<K, V>) right;
if (right != null && right.getParent() != this)
right.setParent(this);
@@ -69,27 +69,27 @@ public OTreeMapEntry<K, V> setRight(final OTreeMapEntry<K, V> right) {
}
@Override
- public OTreeMapEntry<K, V> getRight() {
+ public OMVRBTreeEntry<K, V> getRight() {
return right;
}
@Override
- public OTreeMapEntry<K, V> setParent(final OTreeMapEntry<K, V> parent) {
- this.parent = (OTreeMapEntryMemory<K, V>) parent;
+ public OMVRBTreeEntry<K, V> setParent(final OMVRBTreeEntry<K, V> parent) {
+ this.parent = (OMVRBTreeEntryMemory<K, V>) parent;
return parent;
}
@Override
- public OTreeMapEntry<K, V> getParent() {
+ public OMVRBTreeEntry<K, V> getParent() {
return parent;
}
/**
* Returns the successor of the current Entry only by traversing the memory, or null if no such.
*/
- public OTreeMapEntryMemory<K, V> getNextInMemory() {
- OTreeMapEntryMemory<K, V> t = this;
- OTreeMapEntryMemory<K, V> p = null;
+ public OMVRBTreeEntryMemory<K, V> getNextInMemory() {
+ OMVRBTreeEntryMemory<K, V> t = this;
+ OMVRBTreeEntryMemory<K, V> p = null;
if (t.right != null) {
p = t.right;
@@ -107,17 +107,17 @@ public OTreeMapEntryMemory<K, V> getNextInMemory() {
}
@Override
- protected OTreeMapEntry<K, V> getLeftInMemory() {
+ protected OMVRBTreeEntry<K, V> getLeftInMemory() {
return left;
}
@Override
- protected OTreeMapEntry<K, V> getParentInMemory() {
+ protected OMVRBTreeEntry<K, V> getParentInMemory() {
return parent;
}
@Override
- protected OTreeMapEntry<K, V> getRightInMemory() {
+ protected OMVRBTreeEntry<K, V> getRightInMemory() {
return right;
}
}
\ No newline at end of file
diff --git a/commons/src/main/java/com/orientechnologies/common/collection/OTreeMapEventListener.java b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeEventListener.java
similarity index 81%
rename from commons/src/main/java/com/orientechnologies/common/collection/OTreeMapEventListener.java
rename to commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeEventListener.java
index aeb6525b69b..a1b4862fbaf 100644
--- a/commons/src/main/java/com/orientechnologies/common/collection/OTreeMapEventListener.java
+++ b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeEventListener.java
@@ -20,8 +20,8 @@
*
* @author Luca Garulli (l.garulli--at--orientechnologies.com)
*/
-public interface OTreeMapEventListener<K, V> {
- public void signalTreeChanged(OTreeMap<K, V> iTree);
+public interface OMVRBTreeEventListener<K, V> {
+ public void signalTreeChanged(OMVRBTree<K, V> iTree);
- public void signalNodeChanged(OTreeMapEntry<K, V> iNode);
+ public void signalNodeChanged(OMVRBTreeEntry<K, V> iNode);
}
diff --git a/commons/src/main/java/com/orientechnologies/common/collection/OTreeMapMemory.java b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeMemory.java
similarity index 65%
rename from commons/src/main/java/com/orientechnologies/common/collection/OTreeMapMemory.java
rename to commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeMemory.java
index f089a091b99..3353de43801 100644
--- a/commons/src/main/java/com/orientechnologies/common/collection/OTreeMapMemory.java
+++ b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeMemory.java
@@ -20,22 +20,23 @@
import java.util.SortedMap;
@SuppressWarnings("serial")
-public class OTreeMapMemory<K, V> extends OTreeMap<K, V> {
+public class OMVRBTreeMemory<K, V> extends OMVRBTree<K, V> {
/**
- * Constructs a new, empty tree map, using the natural ordering of its keys. All keys inserted into the map must implement the
- * {@link Comparable} interface. Furthermore, all such keys must be <i>mutually comparable</i>: <tt>k1.compareTo(k2)</tt> must not
- * throw a <tt>ClassCastException</tt> for any keys <tt>k1</tt> and <tt>k2</tt> in the map. If the user attempts to put a key into
- * the map that violates this constraint (for example, the user attempts to put a string key into a map whose keys are integers),
- * the <tt>put(Object key, Object value)</tt> call will throw a <tt>ClassCastException</tt>.
+ * Memory based MVRB-Tree implementation. Constructs a new, empty tree map, using the natural ordering of its keys. All keys
+ * inserted into the map must implement the {@link Comparable} interface. Furthermore, all such keys must be <i>mutually
+ * comparable</i>: <tt>k1.compareTo(k2)</tt> must not throw a <tt>ClassCastException</tt> for any keys <tt>k1</tt> and <tt>k2</tt>
+ * in the map. If the user attempts to put a key into the map that violates this constraint (for example, the user attempts to put
+ * a string key into a map whose keys are integers), the <tt>put(Object key, Object value)</tt> call will throw a
+ * <tt>ClassCastException</tt>.
*/
- public OTreeMapMemory() {
+ public OMVRBTreeMemory() {
}
- public OTreeMapMemory(final int iSize, final float iLoadFactor) {
+ public OMVRBTreeMemory(final int iSize, final float iLoadFactor) {
super(iSize, iLoadFactor);
}
- public OTreeMapMemory(final OTreeMapEventListener<K, V> iListener) {
+ public OMVRBTreeMemory(final OMVRBTreeEventListener<K, V> iListener) {
super(iListener);
}
@@ -50,7 +51,7 @@ public OTreeMapMemory(final OTreeMapEventListener<K, V> iListener) {
* the comparator that will be used to order this map. If <tt>null</tt>, the {@linkplain Comparable natural ordering} of
* the keys will be used.
*/
- public OTreeMapMemory(final Comparator<? super K> comparator) {
+ public OMVRBTreeMemory(final Comparator<? super K> comparator) {
super(comparator);
}
@@ -67,7 +68,7 @@ public OTreeMapMemory(final Comparator<? super K> comparator) {
* @throws NullPointerException
* if the specified map is null
*/
- public OTreeMapMemory(final Map<? extends K, ? extends V> m) {
+ public OMVRBTreeMemory(final Map<? extends K, ? extends V> m) {
super(m);
}
@@ -80,17 +81,17 @@ public OTreeMapMemory(final Map<? extends K, ? extends V> m) {
* @throws NullPointerException
* if the specified map is null
*/
- public OTreeMapMemory(final SortedMap<K, ? extends V> m) {
+ public OMVRBTreeMemory(final SortedMap<K, ? extends V> m) {
super(m);
}
@Override
- protected OTreeMapEntry<K, V> createEntry(final K key, final V value) {
- return new OTreeMapEntryMemory<K, V>(this, key, value, null);
+ protected OMVRBTreeEntry<K, V> createEntry(final K key, final V value) {
+ return new OMVRBTreeEntryMemory<K, V>(this, key, value, null);
}
@Override
- protected OTreeMapEntry<K, V> createEntry(final OTreeMapEntry<K, V> parent) {
- return new OTreeMapEntryMemory<K, V>(parent, parent.getPageSplitItems());
+ protected OMVRBTreeEntry<K, V> createEntry(final OMVRBTreeEntry<K, V> parent) {
+ return new OMVRBTreeEntryMemory<K, V>(parent, parent.getPageSplitItems());
}
}
diff --git a/commons/src/main/java/com/orientechnologies/common/collection/OTreeSetMemory.java b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeSetMemory.java
similarity index 90%
rename from commons/src/main/java/com/orientechnologies/common/collection/OTreeSetMemory.java
rename to commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeSetMemory.java
index 0d07bdb049d..f09e2c4fda2 100644
--- a/commons/src/main/java/com/orientechnologies/common/collection/OTreeSetMemory.java
+++ b/commons/src/main/java/com/orientechnologies/common/collection/OMVRBTreeSetMemory.java
@@ -24,7 +24,7 @@
import java.util.SortedSet;
@SuppressWarnings("unchecked")
-public class OTreeSetMemory<E> extends AbstractSet<E> implements ONavigableSet<E>, Cloneable, java.io.Serializable {
+public class OMVRBTreeSetMemory<E> extends AbstractSet<E> implements ONavigableSet<E>, Cloneable, java.io.Serializable {
/**
* The backing map.
*/
@@ -36,7 +36,7 @@ public class OTreeSetMemory<E> extends AbstractSet<E> implements ONavigableSet<E
/**
* Constructs a set backed by the specified navigable map.
*/
- OTreeSetMemory(ONavigableMap<E, Object> m) {
+ OMVRBTreeSetMemory(ONavigableMap<E, Object> m) {
this.m = m;
}
@@ -47,8 +47,8 @@ public class OTreeSetMemory<E> extends AbstractSet<E> implements ONavigableSet<E
* the user attempts to add an element to the set that violates this constraint (for example, the user attempts to add a string
* element to a set whose elements are integers), the {@code add} call will throw a {@code ClassCastException}.
*/
- public OTreeSetMemory() {
- this(new OTreeMapMemory<E, Object>());
+ public OMVRBTreeSetMemory() {
+ this(new OMVRBTreeMemory<E, Object>());
}
/**
@@ -61,8 +61,8 @@ public OTreeSetMemory() {
* the comparator that will be used to order this set. If {@code null}, the {@linkplain Comparable natural ordering} of
* the elements will be used.
*/
- public OTreeSetMemory(Comparator<? super E> comparator) {
- this(new OTreeMapMemory<E, Object>(comparator));
+ public OMVRBTreeSetMemory(Comparator<? super E> comparator) {
+ this(new OMVRBTreeMemory<E, Object>(comparator));
}
/**
@@ -78,7 +78,7 @@ public OTreeSetMemory(Comparator<? super E> comparator) {
* @throws NullPointerException
* if the specified collection is null
*/
- public OTreeSetMemory(Collection<? extends E> c) {
+ public OMVRBTreeSetMemory(Collection<? extends E> c) {
this();
addAll(c);
}
@@ -91,7 +91,7 @@ public OTreeSetMemory(Collection<? extends E> c) {
* @throws NullPointerException
* if the specified sorted set is null
*/
- public OTreeSetMemory(SortedSet<E> s) {
+ public OMVRBTreeSetMemory(SortedSet<E> s) {
this(s.comparator());
addAll(s);
}
@@ -120,7 +120,7 @@ public Iterator<E> descendingIterator() {
* @since 1.6
*/
public ONavigableSet<E> descendingSet() {
- return new OTreeSetMemory<E>(m.descendingMap());
+ return new OMVRBTreeSetMemory<E>(m.descendingMap());
}
/**
@@ -220,9 +220,9 @@ public void clear() {
@Override
public boolean addAll(Collection<? extends E> c) {
// Use linear-time version if applicable
- if (m.size() == 0 && c.size() > 0 && c instanceof SortedSet && m instanceof OTreeMap) {
+ if (m.size() == 0 && c.size() > 0 && c instanceof SortedSet && m instanceof OMVRBTree) {
SortedSet<? extends E> set = (SortedSet<? extends E>) c;
- OTreeMap<E, Object> map = (OTreeMap<E, Object>) m;
+ OMVRBTree<E, Object> map = (OMVRBTree<E, Object>) m;
Comparator<? super E> cc = (Comparator<? super E>) set.comparator();
Comparator<? super E> mc = map.comparator();
if (cc == mc || (cc != null && cc.equals(mc))) {
@@ -244,7 +244,7 @@ public boolean addAll(Collection<? extends E> c) {
* @since 1.6
*/
public ONavigableSet<E> subSet(E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) {
- return new OTreeSetMemory<E>(m.subMap(fromElement, fromInclusive, toElement, toInclusive));
+ return new OMVRBTreeSetMemory<E>(m.subMap(fromElement, fromInclusive, toElement, toInclusive));
}
/**
@@ -257,7 +257,7 @@ public ONavigableSet<E> subSet(E fromElement, boolean fromInclusive, E toElement
* @since 1.6
*/
public ONavigableSet<E> headSet(E toElement, boolean inclusive) {
- return new OTreeSetMemory<E>(m.headMap(toElement, inclusive));
+ return new OMVRBTreeSetMemory<E>(m.headMap(toElement, inclusive));
}
/**
@@ -270,7 +270,7 @@ public ONavigableSet<E> headSet(E toElement, boolean inclusive) {
* @since 1.6
*/
public ONavigableSet<E> tailSet(E fromElement, boolean inclusive) {
- return new OTreeSetMemory<E>(m.tailMap(fromElement, inclusive));
+ return new OMVRBTreeSetMemory<E>(m.tailMap(fromElement, inclusive));
}
/**
@@ -399,14 +399,14 @@ public E pollLast() {
*/
@Override
public Object clone() {
- OTreeSetMemory<E> clone = null;
+ OMVRBTreeSetMemory<E> clone = null;
try {
- clone = (OTreeSetMemory<E>) super.clone();
+ clone = (OMVRBTreeSetMemory<E>) super.clone();
} catch (CloneNotSupportedException e) {
throw new InternalError();
}
- clone.m = new OTreeMapMemory<E, Object>(m);
+ clone.m = new OMVRBTreeMemory<E, Object>(m);
return clone;
}
@@ -443,12 +443,12 @@ private void readObject(java.io.ObjectInputStream s) throws java.io.IOException,
// Read in Comparator
Comparator<? super E> c = (Comparator<? super E>) s.readObject();
- // Create backing OTreeMap
- OTreeMap<E, Object> tm;
+ // Create backing OMVRBTree
+ OMVRBTree<E, Object> tm;
if (c == null)
- tm = new OTreeMapMemory<E, Object>();
+ tm = new OMVRBTreeMemory<E, Object>();
else
- tm = new OTreeMapMemory<E, Object>(c);
+ tm = new OMVRBTreeMemory<E, Object>(c);
m = tm;
// Read in size
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/ODatabase.java b/core/src/main/java/com/orientechnologies/orient/core/db/ODatabase.java
index de3c17879c3..50226478abb 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/db/ODatabase.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/ODatabase.java
@@ -197,7 +197,7 @@ public interface ODatabase {
public long countClusterElements(String iClusterName);
/**
- * Adds a logical cluster. Logical clusters don't need separate files since are stored inside a OTreeMap instance. Access is
+ * Adds a logical cluster. Logical clusters don't need separate files since are stored inside a OMVRBTree instance. Access is
* slower than the physical cluster but the database size is reduced and less files are requires. This matters in some OS where a
* single process has limitation for the number of files can open. Most accessed entities should be stored inside a physical
* cluster.
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/kv/OKVDatabase.java b/core/src/main/java/com/orientechnologies/orient/core/db/kv/OKVDatabase.java
index 818c87c50a2..8f9d19af7c2 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/db/kv/OKVDatabase.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/kv/OKVDatabase.java
@@ -7,27 +7,27 @@
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerString;
import com.orientechnologies.orient.core.storage.impl.local.ODictionaryLocal;
-import com.orientechnologies.orient.core.type.tree.OTreeMapDatabase;
+import com.orientechnologies.orient.core.type.tree.OMVRBTreeDatabase;
public class OKVDatabase extends ODatabaseDocumentTx {
public OKVDatabase(final String iURL) {
super(iURL);
}
- public OTreeMapDatabase<String, String> getBucket(final ODatabaseRecordAbstract<ORecordBytes> iDb, final String iBucket)
+ public OMVRBTreeDatabase<String, String> getBucket(final ODatabaseRecordAbstract<ORecordBytes> iDb, final String iBucket)
throws IOException {
ORecordBytes rec = iDb.getDictionary().get(iBucket);
- OTreeMapDatabase<String, String> bucketTree = null;
+ OMVRBTreeDatabase<String, String> bucketTree = null;
if (rec != null) {
- bucketTree = new OTreeMapDatabase<String, String>(iDb, rec.getIdentity());
+ bucketTree = new OMVRBTreeDatabase<String, String>(iDb, rec.getIdentity());
bucketTree.load();
}
if (bucketTree == null) {
// CREATE THE BUCKET
- bucketTree = new OTreeMapDatabase<String, String>(iDb, ODictionaryLocal.DICTIONARY_DEF_CLUSTER_NAME,
+ bucketTree = new OMVRBTreeDatabase<String, String>(iDb, ODictionaryLocal.DICTIONARY_DEF_CLUSTER_NAME,
OStreamSerializerString.INSTANCE, OStreamSerializerString.INSTANCE);
bucketTree.save();
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexFullText.java b/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexFullText.java
index 91e6a298d28..ab4531e5465 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexFullText.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexFullText.java
@@ -32,7 +32,7 @@
import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerListRID;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerString;
-import com.orientechnologies.orient.core.type.tree.OTreeMapDatabaseLazySave;
+import com.orientechnologies.orient.core.type.tree.OMVRBTreeDatabaseLazySave;
/**
* Fast index for full-text searches.
@@ -81,7 +81,7 @@ public OPropertyIndex create(final ODatabaseRecord<?> iDatabase, final OProperty
while (db != null && !(db instanceof ODatabaseRecord<?>))
db = db.getUnderlying();
- map = new OTreeMapDatabaseLazySave<String, List<ORecordId>>((ODatabaseRecord<?>) db, iClusterIndexName,
+ map = new OMVRBTreeDatabaseLazySave<String, List<ORecordId>>((ODatabaseRecord<?>) db, iClusterIndexName,
OStreamSerializerString.INSTANCE, OStreamSerializerListRID.INSTANCE);
map.lazySave();
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexMVRBTreeAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexMVRBTreeAbstract.java
index 55f3e730cdd..ad7783a6074 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexMVRBTreeAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OPropertyIndexMVRBTreeAbstract.java
@@ -31,7 +31,7 @@
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerListRID;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerString;
-import com.orientechnologies.orient.core.type.tree.OTreeMapDatabaseLazySave;
+import com.orientechnologies.orient.core.type.tree.OMVRBTreeDatabaseLazySave;
/**
* Handles indexing when records change.
@@ -41,7 +41,7 @@
*/
public abstract class OPropertyIndexMVRBTreeAbstract extends OSharedResource implements OPropertyIndex {
protected OProperty owner;
- protected OTreeMapDatabaseLazySave<String, List<ORecordId>> map;
+ protected OMVRBTreeDatabaseLazySave<String, List<ORecordId>> map;
public OPropertyIndexMVRBTreeAbstract() {
}
@@ -75,7 +75,7 @@ public OPropertyIndexMVRBTreeAbstract(final ODatabaseRecord<?> iDatabase, final
public OPropertyIndex create(final ODatabaseRecord<?> iDatabase, final OProperty iProperty, final String iClusterIndexName,
final OProgressListener iProgressListener) {
owner = iProperty;
- map = new OTreeMapDatabaseLazySave<String, List<ORecordId>>(iDatabase, iClusterIndexName, OStreamSerializerString.INSTANCE,
+ map = new OMVRBTreeDatabaseLazySave<String, List<ORecordId>>(iDatabase, iClusterIndexName, OStreamSerializerString.INSTANCE,
OStreamSerializerListRID.INSTANCE);
rebuild(iProgressListener);
return this;
@@ -233,7 +233,7 @@ public Iterator<Entry<String, List<ORecordId>>> iterator() {
}
protected void init(final ODatabaseRecord<?> iDatabase, final ORID iRecordId) {
- map = new OTreeMapDatabaseLazySave<String, List<ORecordId>>(iDatabase, iRecordId);
+ map = new OMVRBTreeDatabaseLazySave<String, List<ORecordId>>(iDatabase, iRecordId);
map.load();
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OClusterLogical.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OClusterLogical.java
index d83295720ac..b2bb660e3f8 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OClusterLogical.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/OClusterLogical.java
@@ -26,7 +26,7 @@
import com.orientechnologies.orient.core.storage.OCluster;
import com.orientechnologies.orient.core.storage.OClusterPositionIterator;
import com.orientechnologies.orient.core.storage.OPhysicalPosition;
-import com.orientechnologies.orient.core.storage.tree.OTreeMapStorage;
+import com.orientechnologies.orient.core.storage.tree.OMVRBTreeStorage;
/**
* Handle a cluster using a logical structure stored into a real physical local cluster.<br/>
@@ -39,7 +39,7 @@ public class OClusterLogical implements OCluster {
private int id;
private int localClusterId;
- private OTreeMapStorage<Long, OPhysicalPosition> map;
+ private OMVRBTreeStorage<Long, OPhysicalPosition> map;
private OPhysicalPosition total;
private OSharedResourceExternal lock = new OSharedResourceExternal();
@@ -61,7 +61,7 @@ public OClusterLogical(final OStorageLocal iStorage, final int iId, final String
this(iName, iId, iPhysicalClusterId);
try {
- map = new OTreeMapStorage<Long, OPhysicalPosition>(iStorage, iStorage.getClusterById(iPhysicalClusterId).getName(),
+ map = new OMVRBTreeStorage<Long, OPhysicalPosition>(iStorage, iStorage.getClusterById(iPhysicalClusterId).getName(),
OStreamSerializerLong.INSTANCE, OStreamSerializerAnyStreamable.INSTANCE);
map.getRecord().setIdentity(iPhysicalClusterId, ORID.CLUSTER_POS_INVALID);
@@ -87,7 +87,7 @@ public OClusterLogical(final OStorageLocal iStorage, final String iName, final i
this(iName, iId, 0);
try {
- map = new OTreeMapStorage<Long, OPhysicalPosition>(iStorage, iStorage.getClusterById(iRecordId.getClusterId()).getName(),
+ map = new OMVRBTreeStorage<Long, OPhysicalPosition>(iStorage, iStorage.getClusterById(iRecordId.getClusterId()).getName(),
iRecordId);
map.load();
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODictionaryLocal.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODictionaryLocal.java
index 72a736a3874..cb87386e8c2 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODictionaryLocal.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/ODictionaryLocal.java
@@ -31,14 +31,14 @@
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerAnyRecord;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerString;
import com.orientechnologies.orient.core.storage.OStorage;
-import com.orientechnologies.orient.core.type.tree.OTreeMapDatabase;
+import com.orientechnologies.orient.core.type.tree.OMVRBTreeDatabase;
@SuppressWarnings("unchecked")
public class ODictionaryLocal<T extends Object> implements ODictionaryInternal<T> {
public static final String DICTIONARY_DEF_CLUSTER_NAME = OStorage.CLUSTER_INTERNAL_NAME;
private ODatabaseComplex<T> database;
- private OTreeMapDatabase<String, T> tree;
+ private OMVRBTreeDatabase<String, T> tree;
public String clusterName = DICTIONARY_DEF_CLUSTER_NAME;
@@ -75,14 +75,14 @@ public int size() {
}
public void load() {
- tree = new OTreeMapDatabase<String, T>((ODatabaseRecord<?>) database, new ORecordId(
+ tree = new OMVRBTreeDatabase<String, T>((ODatabaseRecord<?>) database, new ORecordId(
database.getStorage().getConfiguration().dictionaryRecordId));
tree.load();
}
public void create() {
try {
- tree = new OTreeMapDatabase<String, T>((ODatabaseRecord<?>) database, clusterName, OStreamSerializerString.INSTANCE,
+ tree = new OMVRBTreeDatabase<String, T>((ODatabaseRecord<?>) database, clusterName, OStreamSerializerString.INSTANCE,
new OStreamSerializerAnyRecord((ODatabaseRecord<? extends ORecord<?>>) database));
tree.save();
@@ -93,7 +93,7 @@ public void create() {
}
}
- public OTreeMapDatabase<String, T> getTree() {
+ public OMVRBTreeDatabase<String, T> getTree() {
return tree;
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/tree/OTreeMapEntryStorage.java b/core/src/main/java/com/orientechnologies/orient/core/storage/tree/OMVRBTreeEntryStorage.java
similarity index 62%
rename from core/src/main/java/com/orientechnologies/orient/core/storage/tree/OTreeMapEntryStorage.java
rename to core/src/main/java/com/orientechnologies/orient/core/storage/tree/OMVRBTreeEntryStorage.java
index 39b59a4b6b9..2c6f4d31e19 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/tree/OTreeMapEntryStorage.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/tree/OMVRBTreeEntryStorage.java
@@ -17,11 +17,11 @@
import java.io.IOException;
-import com.orientechnologies.common.collection.OTreeMapEntry;
+import com.orientechnologies.common.collection.OMVRBTreeEntry;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.storage.ORawBuffer;
-import com.orientechnologies.orient.core.type.tree.OTreeMapEntryPersistent;
-import com.orientechnologies.orient.core.type.tree.OTreeMapPersistent;
+import com.orientechnologies.orient.core.type.tree.OMVRBTreeEntryPersistent;
+import com.orientechnologies.orient.core.type.tree.OMVRBTreePersistent;
/**
* Persistent TreeMap implementation that use a OStorage instance to handle the entries. This class can't be used also from the
@@ -34,27 +34,27 @@
* @param <V>
* Value type
*/
-public class OTreeMapEntryStorage<K, V> extends OTreeMapEntryPersistent<K, V> {
+public class OMVRBTreeEntryStorage<K, V> extends OMVRBTreeEntryPersistent<K, V> {
- public OTreeMapEntryStorage(OTreeMapEntry<K, V> iParent, int iPosition) {
+ public OMVRBTreeEntryStorage(OMVRBTreeEntry<K, V> iParent, int iPosition) {
super(iParent, iPosition);
record.setIdentity(pTree.getRecord().getIdentity().getClusterId(), ORID.CLUSTER_POS_INVALID);
}
- public OTreeMapEntryStorage(OTreeMapPersistent<K, V> iTree, K key, V value, OTreeMapEntryPersistent<K, V> iParent) {
+ public OMVRBTreeEntryStorage(OMVRBTreePersistent<K, V> iTree, K key, V value, OMVRBTreeEntryPersistent<K, V> iParent) {
super(iTree, key, value, iParent);
record.setIdentity(pTree.getRecord().getIdentity().getClusterId(), ORID.CLUSTER_POS_INVALID);
}
- public OTreeMapEntryStorage(OTreeMapPersistent<K, V> iTree, OTreeMapEntryPersistent<K, V> iParent, ORID iRecordId)
+ public OMVRBTreeEntryStorage(OMVRBTreePersistent<K, V> iTree, OMVRBTreeEntryPersistent<K, V> iParent, ORID iRecordId)
throws IOException {
super(iTree, iParent, iRecordId);
load();
}
@Override
- public OTreeMapEntryStorage<K, V> load() throws IOException {
- ORawBuffer raw = ((OTreeMapStorage<K, V>) tree).storage.readRecord(null, -1, record.getIdentity().getClusterId(), record
+ public OMVRBTreeEntryStorage<K, V> load() throws IOException {
+ ORawBuffer raw = ((OMVRBTreeStorage<K, V>) tree).storage.readRecord(null, -1, record.getIdentity().getClusterId(), record
.getIdentity().getClusterPosition(), null);
record.setVersion(raw.version);
@@ -65,18 +65,18 @@ public OTreeMapEntryStorage<K, V> load() throws IOException {
}
@Override
- public OTreeMapEntryStorage<K, V> save() throws IOException {
+ public OMVRBTreeEntryStorage<K, V> save() throws IOException {
record.fromStream(toStream());
if (record.getIdentity().isValid())
// UPDATE IT WITHOUT VERSION CHECK SINCE ALL IT'S LOCKED
- record.setVersion(((OTreeMapStorage<K, V>) tree).storage.updateRecord(0, record.getIdentity().getClusterId(), record
+ record.setVersion(((OMVRBTreeStorage<K, V>) tree).storage.updateRecord(0, record.getIdentity().getClusterId(), record
.getIdentity().getClusterPosition(), record.toStream(), -1, record.getRecordType()));
else {
// CREATE IT
record.setIdentity(
record.getIdentity().getClusterId(),
- ((OTreeMapStorage<K, V>) tree).storage.createRecord(record.getIdentity().getClusterId(), record.toStream(),
+ ((OMVRBTreeStorage<K, V>) tree).storage.createRecord(record.getIdentity().getClusterId(), record.toStream(),
record.getRecordType()));
}
record.unsetDirty();
@@ -91,17 +91,17 @@ public OTreeMapEntryStorage<K, V> save() throws IOException {
*
* @throws IOException
*/
- public OTreeMapEntryStorage<K, V> delete() throws IOException {
+ public OMVRBTreeEntryStorage<K, V> delete() throws IOException {
// EARLY LOAD LEFT AND DELETE IT RECURSIVELY
if (getLeft() != null)
- ((OTreeMapEntryPersistent<K, V>) getLeft()).delete();
+ ((OMVRBTreeEntryPersistent<K, V>) getLeft()).delete();
// EARLY LOAD RIGHT AND DELETE IT RECURSIVELY
if (getRight() != null)
- ((OTreeMapEntryPersistent<K, V>) getRight()).delete();
+ ((OMVRBTreeEntryPersistent<K, V>) getRight()).delete();
// DELETE MYSELF
- ((OTreeMapStorage<K, V>) tree).storage.deleteRecord(0, record.getIdentity(), record.getVersion());
+ ((OMVRBTreeStorage<K, V>) tree).storage.deleteRecord(0, record.getIdentity(), record.getVersion());
// FORCE REMOVING OF K/V AND SEIALIZED K/V AS WELL
keys = null;
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/tree/OTreeMapStorage.java b/core/src/main/java/com/orientechnologies/orient/core/storage/tree/OMVRBTreeStorage.java
similarity index 71%
rename from core/src/main/java/com/orientechnologies/orient/core/storage/tree/OTreeMapStorage.java
rename to core/src/main/java/com/orientechnologies/orient/core/storage/tree/OMVRBTreeStorage.java
index 6bdca91ed28..cb346b89e58 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/tree/OTreeMapStorage.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/tree/OMVRBTreeStorage.java
@@ -17,7 +17,7 @@
import java.io.IOException;
-import com.orientechnologies.common.collection.OTreeMapEntry;
+import com.orientechnologies.common.collection.OMVRBTreeEntry;
import com.orientechnologies.orient.core.exception.OConfigurationException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.serialization.OMemoryInputStream;
@@ -26,28 +26,28 @@
import com.orientechnologies.orient.core.storage.ORawBuffer;
import com.orientechnologies.orient.core.storage.impl.local.OClusterLogical;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocal;
-import com.orientechnologies.orient.core.type.tree.OTreeMapEntryPersistent;
-import com.orientechnologies.orient.core.type.tree.OTreeMapPersistent;
+import com.orientechnologies.orient.core.type.tree.OMVRBTreeEntryPersistent;
+import com.orientechnologies.orient.core.type.tree.OMVRBTreePersistent;
/**
- * Persistent TreeMap implementation. The difference with the class OTreeMapPersistent is the level. In facts this class works
+ * Persistent MVRB-Tree implementation. The difference with the class OMVRBTreeDatabase is the level. In facts this class works
* directly at the storage level, while the other at database level. This class is used for Logical Clusters. It can'be
* transactional.
*
* @see OClusterLogical
*/
@SuppressWarnings("serial")
-public class OTreeMapStorage<K, V> extends OTreeMapPersistent<K, V> {
+public class OMVRBTreeStorage<K, V> extends OMVRBTreePersistent<K, V> {
protected OStorageLocal storage;
int clusterId;
- public OTreeMapStorage(final OStorageLocal iStorage, final String iClusterName, final ORID iRID) {
+ public OMVRBTreeStorage(final OStorageLocal iStorage, final String iClusterName, final ORID iRID) {
super(iClusterName, iRID);
storage = iStorage;
clusterId = storage.getClusterIdByName(OStorageLocal.CLUSTER_INTERNAL_NAME);
}
- public OTreeMapStorage(final OStorageLocal iStorage, String iClusterName, final OStreamSerializer iKeySerializer,
+ public OMVRBTreeStorage(final OStorageLocal iStorage, String iClusterName, final OStreamSerializer iKeySerializer,
final OStreamSerializer iValueSerializer) {
super(iClusterName, iKeySerializer, iValueSerializer);
storage = iStorage;
@@ -55,23 +55,23 @@ public OTreeMapStorage(final OStorageLocal iStorage, String iClusterName, final
}
@Override
- protected OTreeMapEntryPersistent<K, V> createEntry(OTreeMapEntry<K, V> iParent) {
- return new OTreeMapEntryStorage<K, V>(iParent, iParent.getPageSplitItems());
+ protected OMVRBTreeEntryPersistent<K, V> createEntry(OMVRBTreeEntry<K, V> iParent) {
+ return new OMVRBTreeEntryStorage<K, V>(iParent, iParent.getPageSplitItems());
}
@Override
- protected OTreeMapEntryPersistent<K, V> createEntry(final K key, final V value) {
+ protected OMVRBTreeEntryPersistent<K, V> createEntry(final K key, final V value) {
adjustPageSize();
- return new OTreeMapEntryStorage<K, V>(this, key, value, null);
+ return new OMVRBTreeEntryStorage<K, V>(this, key, value, null);
}
@Override
- protected OTreeMapEntryStorage<K, V> loadEntry(OTreeMapEntryPersistent<K, V> iParent, ORID iRecordId) throws IOException {
- OTreeMapEntryStorage<K, V> entry = null;//(OTreeMapEntryStorage<K, V>) cache.get(iRecordId);
+ protected OMVRBTreeEntryStorage<K, V> loadEntry(OMVRBTreeEntryPersistent<K, V> iParent, ORID iRecordId) throws IOException {
+ OMVRBTreeEntryStorage<K, V> entry = null;// (OMVRBTreeEntryStorage<K, V>) cache.get(iRecordId);
if (entry == null) {
// NOT FOUND: CREATE IT AND PUT IT INTO THE CACHE
- entry = new OTreeMapEntryStorage<K, V>(this, iParent, iRecordId);
-// cache.put(iRecordId, entry);
+ entry = new OMVRBTreeEntryStorage<K, V>(this, iParent, iRecordId);
+ // cache.put(iRecordId, entry);
} else
// FOUND: ASSIGN IT
entry.setParent(iParent);
@@ -80,7 +80,7 @@ protected OTreeMapEntryStorage<K, V> loadEntry(OTreeMapEntryPersistent<K, V> iPa
}
@Override
- public OTreeMapPersistent<K, V> load() throws IOException {
+ public OMVRBTreePersistent<K, V> load() throws IOException {
lock.acquireExclusiveLock();
try {
@@ -101,7 +101,7 @@ public OTreeMapPersistent<K, V> load() throws IOException {
}
@Override
- public OTreeMapPersistent<K, V> save() throws IOException {
+ public OMVRBTreePersistent<K, V> save() throws IOException {
lock.acquireExclusiveLock();
try {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapDatabase.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeDatabase.java
similarity index 64%
rename from core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapDatabase.java
rename to core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeDatabase.java
index 891a928909b..2139ee62b79 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapDatabase.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeDatabase.java
@@ -17,24 +17,29 @@
import java.io.IOException;
-import com.orientechnologies.common.collection.OTreeMapEntry;
+import com.orientechnologies.common.collection.OMVRBTreeEntry;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.serialization.OMemoryInputStream;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerFactory;
+/**
+ * Persistent MVRB-Tree implementation. The difference with the class OMVRBTreeStorage is the level. In facts this class works
+ * directly at the database level, while the other at storage level.
+ *
+ */
@SuppressWarnings("serial")
-public class OTreeMapDatabase<K, V> extends OTreeMapPersistent<K, V> {
+public class OMVRBTreeDatabase<K, V> extends OMVRBTreePersistent<K, V> {
protected ODatabaseRecord<?> database;
- public OTreeMapDatabase(final ODatabaseRecord<?> iDatabase, final ORID iRID) {
+ public OMVRBTreeDatabase(final ODatabaseRecord<?> iDatabase, final ORID iRID) {
super(iDatabase.getClusterNameById(iRID.getClusterId()), iRID);
database = iDatabase;
record.setDatabase(iDatabase);
}
- public OTreeMapDatabase(final ODatabaseRecord<?> iDatabase, String iClusterName, final OStreamSerializer iKeySerializer,
+ public OMVRBTreeDatabase(final ODatabaseRecord<?> iDatabase, String iClusterName, final OStreamSerializer iKeySerializer,
final OStreamSerializer iValueSerializer) {
super(iClusterName, iKeySerializer, iValueSerializer);
database = iDatabase;
@@ -42,26 +47,26 @@ public OTreeMapDatabase(final ODatabaseRecord<?> iDatabase, String iClusterName,
}
@Override
- protected OTreeMapEntryDatabase<K, V> createEntry(final K key, final V value) {
+ protected OMVRBTreeEntryDatabase<K, V> createEntry(final K key, final V value) {
adjustPageSize();
- return new OTreeMapEntryDatabase<K, V>(this, key, value, null);
+ return new OMVRBTreeEntryDatabase<K, V>(this, key, value, null);
}
@Override
- protected OTreeMapEntryDatabase<K, V> createEntry(final OTreeMapEntry<K, V> parent) {
+ protected OMVRBTreeEntryDatabase<K, V> createEntry(final OMVRBTreeEntry<K, V> parent) {
adjustPageSize();
- return new OTreeMapEntryDatabase<K, V>(parent, parent.getPageSplitItems());
+ return new OMVRBTreeEntryDatabase<K, V>(parent, parent.getPageSplitItems());
}
@Override
- protected OTreeMapEntryDatabase<K, V> loadEntry(final OTreeMapEntryPersistent<K, V> iParent, final ORID iRecordId)
+ protected OMVRBTreeEntryDatabase<K, V> loadEntry(final OMVRBTreeEntryPersistent<K, V> iParent, final ORID iRecordId)
throws IOException {
// SEARCH INTO THE CACHE
- OTreeMapEntryDatabase<K, V> entry = null;// (OTreeMapEntryDatabase<K, V>) cache.get(iRecordId);
+ OMVRBTreeEntryDatabase<K, V> entry = null;// (OMVRBTreeEntryDatabase<K, V>) cache.get(iRecordId);
if (entry == null) {
// NOT FOUND: CREATE IT AND PUT IT INTO THE CACHE
- entry = new OTreeMapEntryDatabase<K, V>(this, (OTreeMapEntryDatabase<K, V>) iParent, iRecordId);
+ entry = new OMVRBTreeEntryDatabase<K, V>(this, (OMVRBTreeEntryDatabase<K, V>) iParent, iRecordId);
// cache.put(iRecordId, entry);
} else {
// entry.load();
@@ -78,7 +83,7 @@ public ODatabaseRecord<?> getDatabase() {
}
@Override
- public OTreeMapPersistent<K, V> load() {
+ public OMVRBTreePersistent<K, V> load() {
if (!record.getIdentity().isValid())
// NOTHING TO LOAD
return this;
@@ -98,7 +103,7 @@ public OTreeMapPersistent<K, V> load() {
}
@Override
- public OTreeMapPersistent<K, V> save() throws IOException {
+ public OMVRBTreePersistent<K, V> save() throws IOException {
lock.acquireExclusiveLock();
try {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapDatabaseLazySave.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeDatabaseLazySave.java
similarity index 88%
rename from core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapDatabaseLazySave.java
rename to core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeDatabaseLazySave.java
index 60b2c448337..0e5949e9969 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapDatabaseLazySave.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeDatabaseLazySave.java
@@ -33,16 +33,16 @@
* @author Luca Garulli
*/
@SuppressWarnings("serial")
-public class OTreeMapDatabaseLazySave<K, V> extends OTreeMapDatabase<K, V> implements ODatabaseLifecycleListener {
+public class OMVRBTreeDatabaseLazySave<K, V> extends OMVRBTreeDatabase<K, V> implements ODatabaseLifecycleListener {
protected int maxUpdatesBeforeSave;
protected int updates = 0;
- public OTreeMapDatabaseLazySave(ODatabaseRecord<?> iDatabase, ORID iRID) {
+ public OMVRBTreeDatabaseLazySave(ODatabaseRecord<?> iDatabase, ORID iRID) {
super(iDatabase, iRID);
init(iDatabase);
}
- public OTreeMapDatabaseLazySave(ODatabaseRecord<?> iDatabase, String iClusterName, OStreamSerializer iKeySerializer,
+ public OMVRBTreeDatabaseLazySave(ODatabaseRecord<?> iDatabase, String iClusterName, OStreamSerializer iKeySerializer,
OStreamSerializer iValueSerializer) {
super(iDatabase, iClusterName, iKeySerializer, iValueSerializer);
init(iDatabase);
@@ -78,7 +78,7 @@ public void onTxRollback(ODatabase iDatabase) {
entryPoints.clear();
try {
if (root != null)
- ((OTreeMapEntryDatabase<K, V>) root).load();
+ ((OMVRBTreeEntryDatabase<K, V>) root).load();
} catch (IOException e) {
throw new OIndexException("Error on loading root node");
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapEntryDatabase.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryDatabase.java
similarity index 74%
rename from core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapEntryDatabase.java
rename to core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryDatabase.java
index 766dba45260..e2b159ed23b 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapEntryDatabase.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryDatabase.java
@@ -17,7 +17,7 @@
import java.io.IOException;
-import com.orientechnologies.common.collection.OTreeMapEntry;
+import com.orientechnologies.common.collection.OMVRBTreeEntry;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.exception.OSerializationException;
import com.orientechnologies.orient.core.id.ORID;
@@ -33,7 +33,7 @@
* @param <V>
* Value type
*/
-public class OTreeMapEntryDatabase<K, V> extends OTreeMapEntryPersistent<K, V> {
+public class OMVRBTreeEntryDatabase<K, V> extends OMVRBTreeEntryPersistent<K, V> {
/**
* Called on event of splitting an entry.
*
@@ -43,9 +43,9 @@ public class OTreeMapEntryDatabase<K, V> extends OTreeMapEntryPersistent<K, V> {
* Current position
* @param iLeft
*/
- public OTreeMapEntryDatabase(OTreeMapEntry<K, V> iParent, int iPosition) {
+ public OMVRBTreeEntryDatabase(OMVRBTreeEntry<K, V> iParent, int iPosition) {
super(iParent, iPosition);
- record.setDatabase(((OTreeMapDatabase<K, V>) pTree).database);
+ record.setDatabase(((OMVRBTreeDatabase<K, V>) pTree).database);
}
/**
@@ -58,27 +58,27 @@ public OTreeMapEntryDatabase(OTreeMapEntry<K, V> iParent, int iPosition) {
* @param iRecordId
* Record to unmarshall
*/
- public OTreeMapEntryDatabase(OTreeMapDatabase<K, V> iTree, OTreeMapEntryDatabase<K, V> iParent, ORID iRecordId)
+ public OMVRBTreeEntryDatabase(OMVRBTreeDatabase<K, V> iTree, OMVRBTreeEntryDatabase<K, V> iParent, ORID iRecordId)
throws IOException {
super(iTree, iParent, iRecordId);
record.setDatabase(iTree.database);
load();
}
- public OTreeMapEntryDatabase(OTreeMapDatabase<K, V> iTree, K key, V value, OTreeMapEntryDatabase<K, V> iParent) {
+ public OMVRBTreeEntryDatabase(OMVRBTreeDatabase<K, V> iTree, K key, V value, OMVRBTreeEntryDatabase<K, V> iParent) {
super(iTree, key, value, iParent);
record.setDatabase(iTree.database);
}
@Override
- public OTreeMapEntryDatabase<K, V> load() throws IOException {
+ public OMVRBTreeEntryDatabase<K, V> load() throws IOException {
record.load();
fromStream(record.toStream());
return this;
}
@Override
- public OTreeMapEntryDatabase<K, V> save() throws OSerializationException {
+ public OMVRBTreeEntryDatabase<K, V> save() throws OSerializationException {
if (!record.isDirty())
return this;
@@ -102,15 +102,15 @@ public OTreeMapEntryDatabase<K, V> save() throws OSerializationException {
*
* @throws IOException
*/
- public OTreeMapEntryDatabase<K, V> delete() throws IOException {
+ public OMVRBTreeEntryDatabase<K, V> delete() throws IOException {
// EARLY LOAD LEFT AND DELETE IT RECURSIVELY
if (getLeft() != null)
- ((OTreeMapEntryPersistent<K, V>) getLeft()).delete();
+ ((OMVRBTreeEntryPersistent<K, V>) getLeft()).delete();
leftRid = null;
// EARLY LOAD RIGHT AND DELETE IT RECURSIVELY
if (getRight() != null)
- ((OTreeMapEntryPersistent<K, V>) getRight()).delete();
+ ((OMVRBTreeEntryPersistent<K, V>) getRight()).delete();
rightRid = null;
// DELETE MYSELF
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapEntryPersistent.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryPersistent.java
similarity index 83%
rename from core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapEntryPersistent.java
rename to core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryPersistent.java
index 183fc89f875..d82791166c1 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapEntryPersistent.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryPersistent.java
@@ -18,7 +18,7 @@
import java.io.IOException;
import java.util.Set;
-import com.orientechnologies.common.collection.OTreeMapEntry;
+import com.orientechnologies.common.collection.OMVRBTreeEntry;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.common.profiler.OProfiler;
import com.orientechnologies.orient.core.exception.OConfigurationException;
@@ -80,8 +80,8 @@
* @param <V>
*/
@SuppressWarnings("unchecked")
-public abstract class OTreeMapEntryPersistent<K, V> extends OTreeMapEntry<K, V> implements OSerializableStream {
- protected OTreeMapPersistent<K, V> pTree;
+public abstract class OMVRBTreeEntryPersistent<K, V> extends OMVRBTreeEntry<K, V> implements OSerializableStream {
+ protected OMVRBTreePersistent<K, V> pTree;
byte[][] serializedKeys;
byte[][] serializedValues;
@@ -92,9 +92,9 @@ public abstract class OTreeMapEntryPersistent<K, V> extends OTreeMapEntry<K, V>
public ORecordBytesLazy record;
- protected OTreeMapEntryPersistent<K, V> parent;
- protected OTreeMapEntryPersistent<K, V> left;
- protected OTreeMapEntryPersistent<K, V> right;
+ protected OMVRBTreeEntryPersistent<K, V> parent;
+ protected OMVRBTreeEntryPersistent<K, V> left;
+ protected OMVRBTreeEntryPersistent<K, V> right;
/**
* Called on event of splitting an entry.
@@ -105,9 +105,9 @@ public abstract class OTreeMapEntryPersistent<K, V> extends OTreeMapEntry<K, V>
* Current position
* @param iLeft
*/
- public OTreeMapEntryPersistent(final OTreeMapEntry<K, V> iParent, final int iPosition) {
+ public OMVRBTreeEntryPersistent(final OMVRBTreeEntry<K, V> iParent, final int iPosition) {
super(iParent, iPosition);
- pTree = (OTreeMapPersistent<K, V>) tree;
+ pTree = (OMVRBTreePersistent<K, V>) tree;
record = new ORecordBytesLazy(this);
setParent(iParent);
@@ -122,7 +122,7 @@ record = new ORecordBytesLazy(this);
serializedKeys = new byte[pageSize][];
serializedValues = new byte[pageSize][];
- final OTreeMapEntryPersistent<K, V> p = (OTreeMapEntryPersistent<K, V>) iParent;
+ final OMVRBTreeEntryPersistent<K, V> p = (OMVRBTreeEntryPersistent<K, V>) iParent;
System.arraycopy(p.serializedKeys, iPosition, serializedKeys, 0, size);
System.arraycopy(p.serializedValues, iPosition, serializedValues, 0, size);
@@ -140,19 +140,19 @@ record = new ORecordBytesLazy(this);
* @param iRecordId
* Record to unmarshall
*/
- public OTreeMapEntryPersistent(final OTreeMapPersistent<K, V> iTree, final OTreeMapEntryPersistent<K, V> iParent,
+ public OMVRBTreeEntryPersistent(final OMVRBTreePersistent<K, V> iTree, final OMVRBTreeEntryPersistent<K, V> iParent,
final ORID iRecordId) throws IOException {
super(iTree);
pTree = iTree;
record = new ORecordBytesLazy(this);
record.setIdentity((ORecordId) iRecordId);
- parent = (OTreeMapEntryPersistent<K, V>) iParent;
+ parent = (OMVRBTreeEntryPersistent<K, V>) iParent;
parentRid = iParent == null ? ORecordId.EMPTY_RECORD_ID : parent.record.getIdentity();
}
- public OTreeMapEntryPersistent(final OTreeMapPersistent<K, V> iTree, final K key, final V value,
- final OTreeMapEntryPersistent<K, V> iParent) {
+ public OMVRBTreeEntryPersistent(final OMVRBTreePersistent<K, V> iTree, final K key, final V value,
+ final OMVRBTreeEntryPersistent<K, V> iParent) {
super(iTree, key, value, iParent);
pTree = iTree;
@@ -170,22 +170,22 @@ record = new ORecordBytesLazy(this);
markDirty();
}
- public OTreeMapEntryPersistent<K, V> load() throws IOException {
+ public OMVRBTreeEntryPersistent<K, V> load() throws IOException {
return this;
}
- public OTreeMapEntryPersistent<K, V> save() throws IOException {
+ public OMVRBTreeEntryPersistent<K, V> save() throws IOException {
return this;
}
- public OTreeMapEntryPersistent<K, V> delete() throws IOException {
+ public OMVRBTreeEntryPersistent<K, V> delete() throws IOException {
pTree.removeEntryPoint(this);
// if (record.getIdentity().isValid())
// pTree.cache.remove(record.getIdentity());
// DELETE THE NODE FROM THE PENDING RECORDS TO COMMIT
- for (OTreeMapEntryPersistent<K, V> node : pTree.recordsToCommit) {
+ for (OMVRBTreeEntryPersistent<K, V> node : pTree.recordsToCommit) {
if (node.record.getIdentity().equals(record.getIdentity())) {
pTree.recordsToCommit.remove(node);
break;
@@ -274,10 +274,10 @@ protected int checkToDisconnect(final int iDepthLevel) {
public int getDepthInMemory() {
int level = 0;
- OTreeMapEntryPersistent<K, V> entry = this;
+ OMVRBTreeEntryPersistent<K, V> entry = this;
while (entry.parent != null) {
level++;
- entry = (OTreeMapEntryPersistent<K, V>) entry.parent;
+ entry = (OMVRBTreeEntryPersistent<K, V>) entry.parent;
}
return level;
}
@@ -285,16 +285,16 @@ public int getDepthInMemory() {
@Override
public int getDepth() {
int level = 0;
- OTreeMapEntryPersistent<K, V> entry = this;
+ OMVRBTreeEntryPersistent<K, V> entry = this;
while (entry.getParent() != null) {
level++;
- entry = (OTreeMapEntryPersistent<K, V>) entry.getParent();
+ entry = (OMVRBTreeEntryPersistent<K, V>) entry.getParent();
}
return level;
}
@Override
- public OTreeMapEntry<K, V> getParent() {
+ public OMVRBTreeEntry<K, V> getParent() {
if (parentRid == null)
return null;
@@ -331,11 +331,11 @@ else if (parent.rightRid.isValid() && parent.rightRid.equals(record.getIdentity(
}
@Override
- public OTreeMapEntry<K, V> setParent(final OTreeMapEntry<K, V> iParent) {
+ public OMVRBTreeEntry<K, V> setParent(final OMVRBTreeEntry<K, V> iParent) {
if (iParent != getParent()) {
markDirty();
- this.parent = (OTreeMapEntryPersistent<K, V>) iParent;
+ this.parent = (OMVRBTreeEntryPersistent<K, V>) iParent;
this.parentRid = iParent == null ? ORecordId.EMPTY_RECORD_ID : parent.record.getIdentity();
if (parent != null) {
@@ -353,7 +353,7 @@ public OTreeMapEntry<K, V> setParent(final OTreeMapEntry<K, V> iParent) {
}
@Override
- public OTreeMapEntry<K, V> getLeft() {
+ public OMVRBTreeEntry<K, V> getLeft() {
if (left == null && leftRid.isValid()) {
try {
// System.out.println("Node " + record.getIdentity() + " is loading LEFT node " + leftRid + "...");
@@ -371,11 +371,11 @@ public OTreeMapEntry<K, V> getLeft() {
}
@Override
- public void setLeft(final OTreeMapEntry<K, V> iLeft) {
+ public void setLeft(final OMVRBTreeEntry<K, V> iLeft) {
if (iLeft == left)
return;
- left = (OTreeMapEntryPersistent<K, V>) iLeft;
+ left = (OMVRBTreeEntryPersistent<K, V>) iLeft;
// if (left == null || !left.record.getIdentity().isValid() || !left.record.getIdentity().equals(leftRid)) {
markDirty();
this.leftRid = iLeft == null ? ORecordId.EMPTY_RECORD_ID : left.record.getIdentity();
@@ -388,7 +388,7 @@ public void setLeft(final OTreeMapEntry<K, V> iLeft) {
}
@Override
- public OTreeMapEntry<K, V> getRight() {
+ public OMVRBTreeEntry<K, V> getRight() {
if (rightRid.isValid() && right == null) {
// LAZY LOADING OF THE RIGHT LEAF
try {
@@ -406,11 +406,11 @@ public OTreeMapEntry<K, V> getRight() {
}
@Override
- public OTreeMapEntry<K, V> setRight(final OTreeMapEntry<K, V> iRight) {
+ public OMVRBTreeEntry<K, V> setRight(final OMVRBTreeEntry<K, V> iRight) {
if (iRight == right)
return this;
- right = (OTreeMapEntryPersistent<K, V>) iRight;
+ right = (OMVRBTreeEntryPersistent<K, V>) iRight;
// if (right == null || !right.record.getIdentity().isValid() || !right.record.getIdentity().equals(rightRid)) {
markDirty();
rightRid = iRight == null ? ORecordId.EMPTY_RECORD_ID : right.record.getIdentity();
@@ -458,10 +458,10 @@ public void checkEntryStructure() {
}
@Override
- protected void copyFrom(final OTreeMapEntry<K, V> iSource) {
+ protected void copyFrom(final OMVRBTreeEntry<K, V> iSource) {
markDirty();
- final OTreeMapEntryPersistent<K, V> source = (OTreeMapEntryPersistent<K, V>) iSource;
+ final OMVRBTreeEntryPersistent<K, V> source = (OMVRBTreeEntryPersistent<K, V>) iSource;
parent = source.parent;
left = source.left;
@@ -528,7 +528,7 @@ protected void remove() {
public K getKeyAt(final int iIndex) {
if (keys[iIndex] == null)
try {
- OProfiler.getInstance().updateCounter("OTreeMapEntryP.unserializeKey", 1);
+ OProfiler.getInstance().updateCounter("OMVRBTreeEntryP.unserializeKey", 1);
keys[iIndex] = (K) pTree.keySerializer.fromStream(serializedKeys[iIndex]);
} catch (IOException e) {
@@ -544,7 +544,7 @@ public K getKeyAt(final int iIndex) {
protected V getValueAt(final int iIndex) {
if (values[iIndex] == null)
try {
- OProfiler.getInstance().updateCounter("OTreeMapEntryP.unserializeValue", 1);
+ OProfiler.getInstance().updateCounter("OMVRBTreeEntryP.unserializeValue", 1);
values[iIndex] = (V) pTree.valueSerializer.fromStream(serializedValues[iIndex]);
} catch (IOException e) {
@@ -594,9 +594,9 @@ private int getMaxDepthInMemory(final int iCurrDepthLevel) {
/**
* Returns the successor of the current Entry only by traversing the memory, or null if no such.
*/
- public OTreeMapEntryPersistent<K, V> getNextInMemory() {
- OTreeMapEntryPersistent<K, V> t = this;
- OTreeMapEntryPersistent<K, V> p = null;
+ public OMVRBTreeEntryPersistent<K, V> getNextInMemory() {
+ OMVRBTreeEntryPersistent<K, V> t = this;
+ OMVRBTreeEntryPersistent<K, V> p = null;
if (t.right != null) {
p = t.right;
@@ -657,7 +657,7 @@ public final OSerializableStream fromStream(final byte[] iStream) throws OSerial
} finally {
buffer.close();
- OProfiler.getInstance().stopChrono("OTreeMapEntryP.fromStream", timer);
+ OProfiler.getInstance().stopChrono("OMVRBTreeEntryP.fromStream", timer);
}
}
@@ -675,7 +675,7 @@ public final byte[] toStream() throws OSerializationException {
// FORCE DIRTY
parent.record.setDirty();
- ((OTreeMapEntryDatabase<K, V>) parent).save();
+ ((OMVRBTreeEntryDatabase<K, V>) parent).save();
parentRid = parent.record.getIdentity();
record.setDirty();
}
@@ -684,7 +684,7 @@ public final byte[] toStream() throws OSerializationException {
// FORCE DIRTY
left.record.setDirty();
- ((OTreeMapEntryDatabase<K, V>) left).save();
+ ((OMVRBTreeEntryDatabase<K, V>) left).save();
leftRid = left.record.getIdentity();
record.setDirty();
}
@@ -693,7 +693,7 @@ public final byte[] toStream() throws OSerializationException {
// FORCE DIRTY
right.record.setDirty();
- ((OTreeMapEntryDatabase<K, V>) right).save();
+ ((OMVRBTreeEntryDatabase<K, V>) right).save();
rightRid = right.record.getIdentity();
record.setDirty();
}
@@ -736,7 +736,7 @@ public final byte[] toStream() throws OSerializationException {
checkEntryStructure();
- OProfiler.getInstance().stopChrono("OTreeMapEntryP.toStream", timer);
+ OProfiler.getInstance().stopChrono("OMVRBTreeEntryP.toStream", timer);
}
}
@@ -748,7 +748,7 @@ public final byte[] toStream() throws OSerializationException {
private void serializeNewKeys() throws IOException {
for (int i = 0; i < size; ++i) {
if (serializedKeys[i] == null) {
- OProfiler.getInstance().updateCounter("OTreeMapEntryP.serializeValue", 1);
+ OProfiler.getInstance().updateCounter("OMVRBTreeEntryP.serializeValue", 1);
serializedKeys[i] = pTree.keySerializer.toStream(keys[i]);
}
@@ -763,7 +763,7 @@ private void serializeNewKeys() throws IOException {
private void serializeNewValues() throws IOException {
for (int i = 0; i < size; ++i) {
if (serializedValues[i] == null) {
- OProfiler.getInstance().updateCounter("OTreeMapEntryP.serializeKey", 1);
+ OProfiler.getInstance().updateCounter("OMVRBTreeEntryP.serializeKey", 1);
serializedValues[i] = pTree.valueSerializer.toStream(values[i]);
}
@@ -791,10 +791,10 @@ private void markDirty() {
// public boolean equals(final Object o) {
// if (this == o)
// return true;
- // if (!(o instanceof OTreeMapEntryPersistent<?, ?>))
+ // if (!(o instanceof OMVRBTreeEntryPersistent<?, ?>))
// return false;
//
- // final OTreeMapEntryPersistent<?, ?> e = (OTreeMapEntryPersistent<?, ?>) o;
+ // final OMVRBTreeEntryPersistent<?, ?> e = (OMVRBTreeEntryPersistent<?, ?>) o;
//
// if (record != null && e.record != null)
// return record.getIdentity().equals(e.record.getIdentity());
@@ -809,17 +809,17 @@ private void markDirty() {
// }
@Override
- protected OTreeMapEntry<K, V> getLeftInMemory() {
+ protected OMVRBTreeEntry<K, V> getLeftInMemory() {
return left;
}
@Override
- protected OTreeMapEntry<K, V> getParentInMemory() {
+ protected OMVRBTreeEntry<K, V> getParentInMemory() {
return parent;
}
@Override
- protected OTreeMapEntry<K, V> getRightInMemory() {
+ protected OMVRBTreeEntry<K, V> getRightInMemory() {
return right;
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapPersistent.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java
similarity index 78%
rename from core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapPersistent.java
rename to core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java
index 6d9bb304349..83a432d849c 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OTreeMapPersistent.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java
@@ -23,9 +23,9 @@
import java.util.Map;
import java.util.Set;
-import com.orientechnologies.common.collection.OTreeMap;
-import com.orientechnologies.common.collection.OTreeMapEntry;
-import com.orientechnologies.common.collection.OTreeMapEventListener;
+import com.orientechnologies.common.collection.OMVRBTree;
+import com.orientechnologies.common.collection.OMVRBTreeEntry;
+import com.orientechnologies.common.collection.OMVRBTreeEventListener;
import com.orientechnologies.common.concur.resource.OSharedResourceExternal;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.common.profiler.OProfiler;
@@ -43,17 +43,15 @@
import com.orientechnologies.orient.core.serialization.OSerializableStream;
import com.orientechnologies.orient.core.serialization.serializer.record.OSerializationThreadLocal;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
-import com.orientechnologies.orient.core.storage.impl.local.OClusterLogical;
/**
- * Persistent TreeMap implementation. The difference with the class OTreeMapPersistent is the level. In facts this class works
- * directly at the storage level, while the other at database level. This class is used for Logical Clusters. It can'be
+ * Persistent based MVRB-Tree implementation. The difference with the class OMVRBTreePersistent is the level. In facts this class
+ * works directly at the storage level, while the other at database level. This class is used for Logical Clusters. It can'be
* transactional. It uses the entryPoints linked list to get the best entry point for searching a node.
*
- * @see OClusterLogical
*/
@SuppressWarnings("serial")
-public abstract class OTreeMapPersistent<K, V> extends OTreeMap<K, V> implements OTreeMapEventListener<K, V>, OSerializableStream {
+public abstract class OMVRBTreePersistent<K, V> extends OMVRBTree<K, V> implements OMVRBTreeEventListener<K, V>, OSerializableStream {
protected int optimizeThreshold;
protected OSharedResourceExternal lock = new OSharedResourceExternal();
@@ -61,7 +59,7 @@ public abstract class OTreeMapPersistent<K, V> extends OTreeMap<K, V> implements
protected OStreamSerializer keySerializer;
protected OStreamSerializer valueSerializer;
- protected final Set<OTreeMapEntryPersistent<K, V>> recordsToCommit = new HashSet<OTreeMapEntryPersistent<K, V>>();
+ protected final Set<OMVRBTreeEntryPersistent<K, V>> recordsToCommit = new HashSet<OMVRBTreeEntryPersistent<K, V>>();
protected final OMemoryOutputStream entryRecordBuffer;
protected final String clusterName;
@@ -72,20 +70,20 @@ public abstract class OTreeMapPersistent<K, V> extends OTreeMap<K, V> implements
// STORES IN MEMORY DIRECT REFERENCES TO PORTION OF THE TREE
protected int entryPointsSize;
protected float optimizeEntryPointsFactor;
- protected volatile List<OTreeMapEntryPersistent<K, V>> entryPoints = new ArrayList<OTreeMapEntryPersistent<K, V>>(
+ protected volatile List<OMVRBTreeEntryPersistent<K, V>> entryPoints = new ArrayList<OMVRBTreeEntryPersistent<K, V>>(
entryPointsSize);
- protected List<OTreeMapEntryPersistent<K, V>> newEntryPoints = new ArrayList<OTreeMapEntryPersistent<K, V>>(
+ protected List<OMVRBTreeEntryPersistent<K, V>> newEntryPoints = new ArrayList<OMVRBTreeEntryPersistent<K, V>>(
entryPointsSize);
- // protected Map<ORID, OTreeMapEntryPersistent<K, V>> cache = new HashMap<ORID, OTreeMapEntryPersistent<K, V>>();
+ // protected Map<ORID, OMVRBTreeEntryPersistent<K, V>> cache = new HashMap<ORID, OMVRBTreeEntryPersistent<K, V>>();
- public OTreeMapPersistent(final String iClusterName, final ORID iRID) {
+ public OMVRBTreePersistent(final String iClusterName, final ORID iRID) {
this(iClusterName, null, null);
record.setIdentity(iRID.getClusterId(), iRID.getClusterPosition());
config();
}
- public OTreeMapPersistent(String iClusterName, final OStreamSerializer iKeySerializer, final OStreamSerializer iValueSerializer) {
+ public OMVRBTreePersistent(String iClusterName, final OStreamSerializer iKeySerializer, final OStreamSerializer iValueSerializer) {
// MINIMIZE I/O USING A LARGER PAGE THAN THE DEFAULT USED IN MEMORY
super(1024, 0.7f);
config();
@@ -101,16 +99,16 @@ record = new ORecordBytesLazy(this);
setListener(this);
}
- public abstract OTreeMapPersistent<K, V> load() throws IOException;
+ public abstract OMVRBTreePersistent<K, V> load() throws IOException;
- public abstract OTreeMapPersistent<K, V> save() throws IOException;
+ public abstract OMVRBTreePersistent<K, V> save() throws IOException;
protected abstract void serializerFromStream(OMemoryInputStream stream) throws IOException;
/**
* Lazy loads a node.
*/
- protected abstract OTreeMapEntryPersistent<K, V> loadEntry(OTreeMapEntryPersistent<K, V> iParent, ORID iRecordId)
+ protected abstract OMVRBTreeEntryPersistent<K, V> loadEntry(OMVRBTreeEntryPersistent<K, V> iParent, ORID iRecordId)
throws IOException;
@Override
@@ -120,7 +118,7 @@ public void clear() {
try {
if (root != null) {
- ((OTreeMapEntryPersistent<K, V>) root).delete();
+ ((OMVRBTreeEntryPersistent<K, V>) root).delete();
super.clear();
getListener().signalTreeChanged(this);
}
@@ -135,7 +133,7 @@ public void clear() {
} finally {
lock.releaseExclusiveLock();
- OProfiler.getInstance().stopChrono("OTreeMapPersistent.clear", timer);
+ OProfiler.getInstance().stopChrono("OMVRBTreePersistent.clear", timer);
}
}
@@ -148,7 +146,7 @@ public void unload() {
try {
// DISCONNECT ALL THE NODES
- for (OTreeMapEntryPersistent<K, V> entryPoint : entryPoints)
+ for (OMVRBTreeEntryPersistent<K, V> entryPoint : entryPoints)
entryPoint.disconnect(true);
entryPoints.clear();
@@ -165,7 +163,7 @@ public void unload() {
} finally {
lock.releaseExclusiveLock();
- OProfiler.getInstance().stopChrono("OTreeMapPersistent.unload", timer);
+ OProfiler.getInstance().stopChrono("OMVRBTreePersistent.unload", timer);
}
}
@@ -185,7 +183,7 @@ public void optimize() {
// printInMemoryStructure();
- OTreeMapEntryPersistent<K, V> pRoot = (OTreeMapEntryPersistent<K, V>) root;
+ OMVRBTreeEntryPersistent<K, V> pRoot = (OMVRBTreeEntryPersistent<K, V>) root;
final int depth = pRoot.getMaxDepthInMemory();
@@ -196,8 +194,8 @@ public void optimize() {
pRoot.checkToDisconnect((int) (entryPointsSize * optimizeEntryPointsFactor));
if (isRuntimeCheckEnabled()) {
- for (OTreeMapEntryPersistent<K, V> entryPoint : entryPoints)
- for (OTreeMapEntryPersistent<K, V> e = (OTreeMapEntryPersistent<K, V>) entryPoint.getFirstInMemory(); e != null; e = e
+ for (OMVRBTreeEntryPersistent<K, V> entryPoint : entryPoints)
+ for (OMVRBTreeEntryPersistent<K, V> e = (OMVRBTreeEntryPersistent<K, V>) entryPoint.getFirstInMemory(); e != null; e = e
.getNextInMemory())
e.checkEntryStructure();
}
@@ -208,14 +206,14 @@ public void optimize() {
if (isRuntimeCheckEnabled()) {
if (entryPoints.size() > 0)
- for (OTreeMapEntryPersistent<K, V> entryPoint : entryPoints)
+ for (OMVRBTreeEntryPersistent<K, V> entryPoint : entryPoints)
checkTreeStructure(entryPoint.getFirstInMemory());
else
checkTreeStructure(root);
}
lock.releaseExclusiveLock();
- OProfiler.getInstance().stopChrono("OTreeMapPersistent.optimize", timer);
+ OProfiler.getInstance().stopChrono("OMVRBTreePersistent.optimize", timer);
if (OLogManager.instance().isDebugEnabled())
OLogManager.instance().debug(this, "Optimization completed in %d ms\n", System.currentTimeMillis() - timer);
@@ -239,7 +237,7 @@ public V put(final K key, final V value) {
} finally {
lock.releaseExclusiveLock();
- OProfiler.getInstance().stopChrono("OTreeMapPersistent.put", timer);
+ OProfiler.getInstance().stopChrono("OMVRBTreePersistent.put", timer);
}
}
@@ -260,7 +258,7 @@ public void putAll(final Map<? extends K, ? extends V> map) {
} finally {
lock.releaseExclusiveLock();
- OProfiler.getInstance().stopChrono("OTreeMapPersistent.putAll", timer);
+ OProfiler.getInstance().stopChrono("OMVRBTreePersistent.putAll", timer);
}
}
@@ -286,7 +284,7 @@ public void commitChanges(final ODatabaseRecord<?> iDatabase) {
try {
if (recordsToCommit.size() > 0) {
- final List<OTreeMapEntryPersistent<K, V>> tmp = new ArrayList<OTreeMapEntryPersistent<K, V>>();
+ final List<OMVRBTreeEntryPersistent<K, V>> tmp = new ArrayList<OMVRBTreeEntryPersistent<K, V>>();
while (recordsToCommit.iterator().hasNext()) {
// COMMIT BEFORE THE NEW RECORDS (TO ASSURE RID IN RELATIONSHIPS)
@@ -294,7 +292,7 @@ public void commitChanges(final ODatabaseRecord<?> iDatabase) {
recordsToCommit.clear();
- for (OTreeMapEntryPersistent<K, V> node : tmp)
+ for (OMVRBTreeEntryPersistent<K, V> node : tmp)
if (node.record.isDirty()) {
if (iDatabase != null)
// REPLACE THE DATABASE WITH THE NEW ACQUIRED
@@ -325,7 +323,7 @@ public void commitChanges(final ODatabaseRecord<?> iDatabase) {
} finally {
lock.releaseExclusiveLock();
- OProfiler.getInstance().stopChrono("OTreeMapPersistent.commitChanges", timer);
+ OProfiler.getInstance().stopChrono("OMVRBTreePersistent.commitChanges", timer);
}
}
@@ -352,11 +350,11 @@ public OSerializableStream fromStream(final byte[] iStream) throws OSerializatio
} catch (Exception e) {
- OLogManager.instance().error(this, "Error on unmarshalling OTreeMapPersistent object from record: %s", e,
+ OLogManager.instance().error(this, "Error on unmarshalling OMVRBTreePersistent object from record: %s", e,
OSerializationException.class, rootRid);
} finally {
- OProfiler.getInstance().stopChrono("OTreeMapPersistent.fromStream", timer);
+ OProfiler.getInstance().stopChrono("OMVRBTreePersistent.fromStream", timer);
}
return this;
}
@@ -377,7 +375,7 @@ public byte[] toStream() throws OSerializationException {
try {
if (root != null) {
- OTreeMapEntryPersistent<K, V> pRoot = (OTreeMapEntryPersistent<K, V>) root;
+ OMVRBTreeEntryPersistent<K, V> pRoot = (OMVRBTreeEntryPersistent<K, V>) root;
if (pRoot.record.getIdentity().isNew()) {
// FIRST TIME: SAVE IT
pRoot.save();
@@ -401,16 +399,16 @@ public byte[] toStream() throws OSerializationException {
} finally {
marshalledRecords.remove(identityRecord);
- OProfiler.getInstance().stopChrono("OTreeMapPersistent.toStream", timer);
+ OProfiler.getInstance().stopChrono("OMVRBTreePersistent.toStream", timer);
}
}
- public void signalTreeChanged(final OTreeMap<K, V> iTree) {
+ public void signalTreeChanged(final OMVRBTree<K, V> iTree) {
record.setDirty();
}
- public void signalNodeChanged(final OTreeMapEntry<K, V> iNode) {
- recordsToCommit.add((OTreeMapEntryPersistent<K, V>) iNode);
+ public void signalNodeChanged(final OMVRBTreeEntry<K, V> iNode) {
+ recordsToCommit.add((OMVRBTreeEntryPersistent<K, V>) iNode);
}
@Override
@@ -601,7 +599,7 @@ protected void updateUsageCounter() {
*/
@SuppressWarnings("unchecked")
@Override
- protected OTreeMapEntry<K, V> getBestEntryPoint(final Object iKey) {
+ protected OMVRBTreeEntry<K, V> getBestEntryPoint(final Object iKey) {
final Comparable<? super K> key = (Comparable<? super K>) iKey;
if (entryPoints.size() == 0)
@@ -609,11 +607,11 @@ protected OTreeMapEntry<K, V> getBestEntryPoint(final Object iKey) {
return root;
// SEARCH THE BEST KEY
- OTreeMapEntryPersistent<K, V> e;
+ OMVRBTreeEntryPersistent<K, V> e;
int entryPointSize = entryPoints.size();
int cmp;
- OTreeMapEntryPersistent<K, V> bestNode = null;
- if (entryPointSize < OTreeMapEntry.BINARY_SEARCH_THRESHOLD) {
+ OMVRBTreeEntryPersistent<K, V> bestNode = null;
+ if (entryPointSize < OMVRBTreeEntry.BINARY_SEARCH_THRESHOLD) {
// LINEAR SEARCH
for (int i = 0; i < entryPointSize; ++i) {
e = entryPoints.get(i);
@@ -687,7 +685,7 @@ protected OTreeMapEntry<K, V> getBestEntryPoint(final Object iKey) {
/**
* Remove an entry point from the list
*/
- void removeEntryPoint(final OTreeMapEntryPersistent<K, V> iEntry) {
+ void removeEntryPoint(final OMVRBTreeEntryPersistent<K, V> iEntry) {
for (int i = 0; i < entryPoints.size(); ++i)
if (entryPoints.get(i) == iEntry) {
entryPoints.remove(i);
@@ -696,16 +694,16 @@ void removeEntryPoint(final OTreeMapEntryPersistent<K, V> iEntry) {
}
/**
- * Returns the first Entry in the OTreeMap (according to the OTreeMap's key-sort function). Returns null if the OTreeMap is empty.
+ * Returns the first Entry in the OMVRBTree (according to the OMVRBTree's key-sort function). Returns null if the OMVRBTree is empty.
*/
@Override
- protected OTreeMapEntry<K, V> getFirstEntry() {
+ protected OMVRBTreeEntry<K, V> getFirstEntry() {
if (entryPoints.size() > 0) {
// FIND THE FIRST ELEMENT STARTING FROM THE FIRST NODE
- OTreeMapEntryPersistent<K, V> e = entryPoints.get(0);
+ OMVRBTreeEntryPersistent<K, V> e = entryPoints.get(0);
while (e.getLeft() != null) {
- e = (OTreeMapEntryPersistent<K, V>) e.getLeft();
+ e = (OMVRBTreeEntryPersistent<K, V>) e.getLeft();
}
return e;
}
@@ -715,12 +713,12 @@ protected OTreeMapEntry<K, V> getFirstEntry() {
// private void printInMemoryStructure() {
// System.out.println("* Entrypoints (" + entryPoints.size() + "), in cache=" + cache.size() + ": *");
- // for (OTreeMapEntryPersistent<K, V> entryPoint : entryPoints)
+ // for (OMVRBTreeEntryPersistent<K, V> entryPoint : entryPoints)
// printInMemoryStructure(entryPoint);
// }
@Override
- protected void setRoot(final OTreeMapEntry<K, V> iRoot) {
+ protected void setRoot(final OMVRBTreeEntry<K, V> iRoot) {
if (iRoot == root)
return;
diff --git a/kv/src/main/java/com/orientechnologies/orient/kv/index/OTreeMapPersistentAsynch.java b/kv/src/main/java/com/orientechnologies/orient/kv/index/OMVRBTreePersistentAsynch.java
similarity index 59%
rename from kv/src/main/java/com/orientechnologies/orient/kv/index/OTreeMapPersistentAsynch.java
rename to kv/src/main/java/com/orientechnologies/orient/kv/index/OMVRBTreePersistentAsynch.java
index a8dd0f816a0..952cea1fc7e 100644
--- a/kv/src/main/java/com/orientechnologies/orient/kv/index/OTreeMapPersistentAsynch.java
+++ b/kv/src/main/java/com/orientechnologies/orient/kv/index/OMVRBTreePersistentAsynch.java
@@ -4,46 +4,46 @@
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
-import com.orientechnologies.orient.core.type.tree.OTreeMapDatabase;
+import com.orientechnologies.orient.core.type.tree.OMVRBTreeDatabase;
import com.orientechnologies.orient.kv.OSharedBinaryDatabase;
/**
* Wrapper class for persistent tree map. It handles the asynchronous commit of changes done by the external
- * OTreeMapPersistentAsynchThread singleton thread.
+ * OMVRBTreePersistentAsynchThread singleton thread.
*
* @author Luca Garulli
*
* @param <K>
* @param <V>
- * @see OTreeMapPersistentAsynchThread
+ * @see OMVRBTreePersistentAsynchThread
*/
@SuppressWarnings("serial")
-public class OTreeMapPersistentAsynch<K, V> extends OTreeMapDatabase<K, V> {
+public class OMVRBTreePersistentAsynch<K, V> extends OMVRBTreeDatabase<K, V> {
- public OTreeMapPersistentAsynch(final ODatabaseRecord<?> iDatabase, final String iClusterName,
+ public OMVRBTreePersistentAsynch(final ODatabaseRecord<?> iDatabase, final String iClusterName,
final OStreamSerializer iKeySerializer, final OStreamSerializer iValueSerializer) {
super(iDatabase, iClusterName, iKeySerializer, iValueSerializer);
- OTreeMapPersistentAsynchThread.getInstance().registerMap(this);
+ OMVRBTreePersistentAsynchThread.getInstance().registerMap(this);
}
- public OTreeMapPersistentAsynch(final ODatabaseRecord<?> iDatabase, final ORID iRID) {
+ public OMVRBTreePersistentAsynch(final ODatabaseRecord<?> iDatabase, final ORID iRID) {
super(iDatabase, iRID);
- OTreeMapPersistentAsynchThread.getInstance().registerMap(this);
+ OMVRBTreePersistentAsynchThread.getInstance().registerMap(this);
}
/**
- * Doesn't commit changes since they are scheduled by the external OTreeMapPersistentAsynchThread singleton thread.
+ * Doesn't commit changes since they are scheduled by the external OMVRBTreePersistentAsynchThread singleton thread.
*
- * @see OTreeMapPersistentAsynchThread#execute()
+ * @see OMVRBTreePersistentAsynchThread#execute()
*/
@Override
public void commitChanges(final ODatabaseRecord<?> iDatabase) {
}
/**
- * Commits changes for real. It's called by OTreeMapPersistentAsynchThread singleton thread.
+ * Commits changes for real. It's called by OMVRBTreePersistentAsynchThread singleton thread.
*
- * @see OTreeMapPersistentAsynchThread#execute()
+ * @see OMVRBTreePersistentAsynchThread#execute()
*/
public void executeCommitChanges() {
ODatabaseBinary db = null;
diff --git a/kv/src/main/java/com/orientechnologies/orient/kv/index/OTreeMapPersistentAsynchThread.java b/kv/src/main/java/com/orientechnologies/orient/kv/index/OMVRBTreePersistentAsynchThread.java
similarity index 73%
rename from kv/src/main/java/com/orientechnologies/orient/kv/index/OTreeMapPersistentAsynchThread.java
rename to kv/src/main/java/com/orientechnologies/orient/kv/index/OMVRBTreePersistentAsynchThread.java
index 232780bd2e9..54251ca97c8 100644
--- a/kv/src/main/java/com/orientechnologies/orient/kv/index/OTreeMapPersistentAsynchThread.java
+++ b/kv/src/main/java/com/orientechnologies/orient/kv/index/OMVRBTreePersistentAsynchThread.java
@@ -28,13 +28,13 @@
* @author Luca Garulli
*
*/
-public class OTreeMapPersistentAsynchThread extends OSoftThread {
+public class OMVRBTreePersistentAsynchThread extends OSoftThread {
private long delay = 0;
- private Set<OTreeMapPersistentAsynch<?, ?>> maps = new HashSet<OTreeMapPersistentAsynch<?, ?>>();
- private static OTreeMapPersistentAsynchThread instance = new OTreeMapPersistentAsynchThread();
+ private Set<OMVRBTreePersistentAsynch<?, ?>> maps = new HashSet<OMVRBTreePersistentAsynch<?, ?>>();
+ private static OMVRBTreePersistentAsynchThread instance = new OMVRBTreePersistentAsynchThread();
- public OTreeMapPersistentAsynchThread setDelay(final int iDelay) {
+ public OMVRBTreePersistentAsynchThread setDelay(final int iDelay) {
delay = iDelay;
return this;
}
@@ -44,13 +44,13 @@ public OTreeMapPersistentAsynchThread setDelay(final int iDelay) {
*
* @param iMap
*/
- public synchronized void registerMap(final OTreeMapPersistentAsynch<?, ?> iMap) {
+ public synchronized void registerMap(final OMVRBTreePersistentAsynch<?, ?> iMap) {
maps.add(iMap);
}
@Override
protected synchronized void execute() throws Exception {
- for (OTreeMapPersistentAsynch<?, ?> map : maps) {
+ for (OMVRBTreePersistentAsynch<?, ?> map : maps) {
try {
synchronized (map) {
@@ -68,7 +68,7 @@ protected void afterExecution() throws InterruptedException {
pauseCurrentThread(delay);
}
- public static OTreeMapPersistentAsynchThread getInstance() {
+ public static OMVRBTreePersistentAsynchThread getInstance() {
return instance;
}
}
diff --git a/kv/src/main/java/com/orientechnologies/orient/kv/network/protocol/http/OKVDictionaryBucketManager.java b/kv/src/main/java/com/orientechnologies/orient/kv/network/protocol/http/OKVDictionaryBucketManager.java
index 3ad704979c2..39031630f10 100644
--- a/kv/src/main/java/com/orientechnologies/orient/kv/network/protocol/http/OKVDictionaryBucketManager.java
+++ b/kv/src/main/java/com/orientechnologies/orient/kv/network/protocol/http/OKVDictionaryBucketManager.java
@@ -22,8 +22,8 @@
import com.orientechnologies.orient.core.db.record.ODatabaseBinary;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerString;
-import com.orientechnologies.orient.core.type.tree.OTreeMapDatabase;
-import com.orientechnologies.orient.kv.index.OTreeMapPersistentAsynch;
+import com.orientechnologies.orient.core.type.tree.OMVRBTreeDatabase;
+import com.orientechnologies.orient.kv.index.OMVRBTreePersistentAsynch;
/**
* Caches bucket tree maps to be reused across calls.
@@ -32,12 +32,12 @@
*
*/
public class OKVDictionaryBucketManager {
- private static Map<String, OTreeMapDatabase<String, String>> bucketCache = new HashMap<String, OTreeMapDatabase<String, String>>();
+ private static Map<String, OMVRBTreeDatabase<String, String>> bucketCache = new HashMap<String, OMVRBTreeDatabase<String, String>>();
private static final String DEFAULT_CLUSTER_NAME = "default";
public static synchronized Map<String, String> getDictionaryBucket(final ODatabaseBinary iDatabase, final String iName,
final boolean iAsynchMode) throws IOException {
- OTreeMapDatabase<String, String> bucket = bucketCache.get(iDatabase.getName() + ":" + iName);
+ OMVRBTreeDatabase<String, String> bucket = bucketCache.get(iDatabase.getName() + ":" + iName);
if (bucket != null)
return bucket;
@@ -47,10 +47,10 @@ public static synchronized Map<String, String> getDictionaryBucket(final ODataba
if (record == null) {
// CREATE THE BUCKET TRANSPARENTLY
if (iAsynchMode)
- bucket = new OTreeMapPersistentAsynch<String, String>(iDatabase, DEFAULT_CLUSTER_NAME, OStreamSerializerString.INSTANCE,
+ bucket = new OMVRBTreePersistentAsynch<String, String>(iDatabase, DEFAULT_CLUSTER_NAME, OStreamSerializerString.INSTANCE,
OStreamSerializerString.INSTANCE);
else
- bucket = new OTreeMapDatabase<String, String>(iDatabase, DEFAULT_CLUSTER_NAME, OStreamSerializerString.INSTANCE,
+ bucket = new OMVRBTreeDatabase<String, String>(iDatabase, DEFAULT_CLUSTER_NAME, OStreamSerializerString.INSTANCE,
OStreamSerializerString.INSTANCE);
bucket.save();
@@ -58,9 +58,9 @@ public static synchronized Map<String, String> getDictionaryBucket(final ODataba
iDatabase.getDictionary().put(iName, bucket.getRecord());
} else {
if (iAsynchMode)
- bucket = new OTreeMapPersistentAsynch<String, String>(iDatabase, record.getIdentity());
+ bucket = new OMVRBTreePersistentAsynch<String, String>(iDatabase, record.getIdentity());
else
- bucket = new OTreeMapDatabase<String, String>(iDatabase, record.getIdentity());
+ bucket = new OMVRBTreeDatabase<String, String>(iDatabase, record.getIdentity());
bucket.load();
}
diff --git a/kv/src/main/java/com/orientechnologies/orient/kv/network/protocol/http/local/ONetworkProtocolHttpKVLocal.java b/kv/src/main/java/com/orientechnologies/orient/kv/network/protocol/http/local/ONetworkProtocolHttpKVLocal.java
index 000cebfbc77..db38c31f2cd 100644
--- a/kv/src/main/java/com/orientechnologies/orient/kv/network/protocol/http/local/ONetworkProtocolHttpKVLocal.java
+++ b/kv/src/main/java/com/orientechnologies/orient/kv/network/protocol/http/local/ONetworkProtocolHttpKVLocal.java
@@ -21,7 +21,7 @@
import com.orientechnologies.common.exception.OException;
import com.orientechnologies.orient.core.db.record.ODatabaseBinary;
import com.orientechnologies.orient.kv.OSharedBinaryDatabase;
-import com.orientechnologies.orient.kv.index.OTreeMapPersistentAsynchThread;
+import com.orientechnologies.orient.kv.index.OMVRBTreePersistentAsynchThread;
import com.orientechnologies.orient.kv.network.protocol.http.OKVDictionary;
import com.orientechnologies.orient.kv.network.protocol.http.OKVDictionaryBucketManager;
import com.orientechnologies.orient.kv.network.protocol.http.ONetworkProtocolHttpKV;
@@ -37,8 +37,8 @@ public class ONetworkProtocolHttpKVLocal extends ONetworkProtocolHttpKV implemen
// START ASYNCH THREAD IF CONFIGURED
String v = OServerMain.server().getConfiguration().getProperty(ASYNCH_COMMIT_DELAY_PAR);
if (v != null) {
- OTreeMapPersistentAsynchThread.getInstance().setDelay(Integer.parseInt(v));
- OTreeMapPersistentAsynchThread.getInstance().start();
+ OMVRBTreePersistentAsynchThread.getInstance().setDelay(Integer.parseInt(v));
+ OMVRBTreePersistentAsynchThread.getInstance().start();
asynchMode = true;
}
//
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/internal/index/OTreeMapSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/internal/index/OMVRBTreeSpeedTest.java
similarity index 82%
rename from tests/src/test/java/com/orientechnologies/orient/test/internal/index/OTreeMapSpeedTest.java
rename to tests/src/test/java/com/orientechnologies/orient/test/internal/index/OMVRBTreeSpeedTest.java
index dda27ba43f1..685c27d0e3c 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/internal/index/OTreeMapSpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/internal/index/OMVRBTreeSpeedTest.java
@@ -18,13 +18,13 @@
import org.testng.Assert;
import org.testng.annotations.Test;
+import com.orientechnologies.common.collection.OMVRBTreeMemory;
import com.orientechnologies.common.collection.ONavigableMap;
-import com.orientechnologies.common.collection.OTreeMapMemory;
import com.orientechnologies.common.test.SpeedTestMonoThread;
-public class OTreeMapSpeedTest extends SpeedTestMonoThread {
+public class OMVRBTreeSpeedTest extends SpeedTestMonoThread {
- private ONavigableMap<Integer, Integer> tree = new OTreeMapMemory<Integer, Integer>();
+ private ONavigableMap<Integer, Integer> tree = new OMVRBTreeMemory<Integer, Integer>();
@Override
@Test(enabled = false)
@@ -61,8 +61,8 @@ public void cycle() {
}
data.printSnapshot();
- // if (tree instanceof OTreeMap<?, ?>) {
- // System.out.println("Total nodes: " + ((OTreeMap<?, ?>) tree).getNodes());
+ // if (tree instanceof OMVRBTree<?, ?>) {
+ // System.out.println("Total nodes: " + ((OMVRBTree<?, ?>) tree).getNodes());
// }
System.out.println("Delete all the elements one by one...");
|
5b4e584e455a1d4713119308c846d66fcfd52d9a
|
drools
|
JBRULES-2439--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@31842 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/drools/rule/builder/dialect/java/JavaDialectConfiguration.java b/drools-compiler/src/main/java/org/drools/rule/builder/dialect/java/JavaDialectConfiguration.java
index 88603d2b226..394a3502c7d 100644
--- a/drools-compiler/src/main/java/org/drools/rule/builder/dialect/java/JavaDialectConfiguration.java
+++ b/drools-compiler/src/main/java/org/drools/rule/builder/dialect/java/JavaDialectConfiguration.java
@@ -150,6 +150,8 @@ private String getDefaultLanguageLevel() {
level = "1.5";
} else if ( version.startsWith( "1.6" ) ) {
level = "1.6";
+ } else if ( version.startsWith( "1.7" ) ) {
+ level = "1.7";
} else {
level = "1.5";
}
|
66b20f84864ea54b306cc5bc57c0939d80588f88
|
hadoop
|
YARN-295. Fixed a race condition in ResourceManager- RMAppAttempt state machine. Contributed by Mayank Bansal. svn merge- --ignore-ancestry -c 1501856 ../../trunk/--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1501857 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 83fb4293beb6a..6ca375b0e684e 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -46,6 +46,9 @@ Release 2.1.1-beta - UNRELEASED
YARN-368. Fixed a typo in error message in Auxiliary services. (Albert Chu
via vinodkv)
+ YARN-295. Fixed a race condition in ResourceManager RMAppAttempt state
+ machine. (Mayank Bansal via vinodkv)
+
Release 2.1.0-beta - 2013-07-02
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
index dd9c42260a62d..11fdd9442f06f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
@@ -245,6 +245,10 @@ RMAppAttemptEventType.LAUNCH_FAILED, new LaunchFailedTransition())
.addTransition(RMAppAttemptState.ALLOCATED, RMAppAttemptState.KILLED,
RMAppAttemptEventType.KILL, new KillAllocatedAMTransition())
+ .addTransition(RMAppAttemptState.ALLOCATED, RMAppAttemptState.FAILED,
+ RMAppAttemptEventType.CONTAINER_FINISHED,
+ new AMContainerCrashedTransition())
+
// Transitions from LAUNCHED State
.addTransition(RMAppAttemptState.LAUNCHED, RMAppAttemptState.RUNNING,
RMAppAttemptEventType.REGISTERED, new AMRegisteredTransition())
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java
index a394110b46931..cafe4f9a7056a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java
@@ -654,6 +654,20 @@ public void testAllocatedToFailed() {
testAppAttemptFailedState(amContainer, diagnostics);
}
+ @Test
+ public void testAMCrashAtAllocated() {
+ Container amContainer = allocateApplicationAttempt();
+ String containerDiagMsg = "some error";
+ int exitCode = 123;
+ ContainerStatus cs =
+ BuilderUtils.newContainerStatus(amContainer.getId(),
+ ContainerState.COMPLETE, containerDiagMsg, exitCode);
+ applicationAttempt.handle(new RMAppAttemptContainerFinishedEvent(
+ applicationAttempt.getAppAttemptId(), cs));
+ assertEquals(RMAppAttemptState.FAILED,
+ applicationAttempt.getAppAttemptState());
+ }
+
@Test
public void testRunningToFailed() {
Container amContainer = allocateApplicationAttempt();
|
190aabbbe8d382b9b960198d8d895ab98c117893
|
camel
|
CAMEL-3689: AdviceWith can now manipulate routes.- This allows you for example to replace parts of routes during testing.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1072545 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/builder/AdviceWithBuilder.java b/camel-core/src/main/java/org/apache/camel/builder/AdviceWithBuilder.java
new file mode 100644
index 0000000000000..1a62f43fbb1a8
--- /dev/null
+++ b/camel-core/src/main/java/org/apache/camel/builder/AdviceWithBuilder.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.builder;
+
+import org.apache.camel.model.PipelineDefinition;
+import org.apache.camel.model.ProcessorDefinition;
+
+/**
+ * A builder when using the <a href="http://camel.apache.org/advicewith.html">advice with</a> feature.
+ */
+public class AdviceWithBuilder {
+
+ private final AdviceWithRouteBuilder builder;
+ private final String id;
+ private final String toString;
+
+ public AdviceWithBuilder(AdviceWithRouteBuilder builder, String id, String toString) {
+ this.builder = builder;
+ this.id = id;
+ this.toString = toString;
+
+ if (id == null && toString == null) {
+ throw new IllegalArgumentException("Either id or toString must be specified");
+ }
+ }
+
+ /**
+ * Replaces the matched node(s) with the following nodes.
+ *
+ * @return the builder to build the nodes.
+ */
+ public ProcessorDefinition replace() {
+ PipelineDefinition answer = new PipelineDefinition();
+ if (id != null) {
+ builder.getAdviceWithTasks().add(AdviceWithTasks.replaceById(builder.getOriginalRoute(), id, answer));
+ } else if (toString != null) {
+ builder.getAdviceWithTasks().add(AdviceWithTasks.replaceByToString(builder.getOriginalRoute(), toString, answer));
+ }
+ return answer;
+ }
+
+ /**
+ * Removes the matched node(s)
+ */
+ public void remove() {
+ if (id != null) {
+ builder.getAdviceWithTasks().add(AdviceWithTasks.removeById(builder.getOriginalRoute(), id));
+ } else if (toString != null) {
+ builder.getAdviceWithTasks().add(AdviceWithTasks.removeByToString(builder.getOriginalRoute(), toString));
+ }
+ }
+
+ /**
+ * Insert the following node(s) <b>before</b> the matched node(s)
+ *
+ * @return the builder to build the nodes.
+ */
+ public ProcessorDefinition before() {
+ PipelineDefinition answer = new PipelineDefinition();
+ if (id != null) {
+ builder.getAdviceWithTasks().add(AdviceWithTasks.beforeById(builder.getOriginalRoute(), id, answer));
+ } else if (toString != null) {
+ builder.getAdviceWithTasks().add(AdviceWithTasks.beforeByToString(builder.getOriginalRoute(), toString, answer));
+ }
+ return answer;
+ }
+
+ /**
+ * Insert the following node(s) <b>after</b> the matched node(s)
+ *
+ * @return the builder to build the nodes.
+ */
+ public ProcessorDefinition after() {
+ PipelineDefinition answer = new PipelineDefinition();
+ if (id != null) {
+ builder.getAdviceWithTasks().add(AdviceWithTasks.afterById(builder.getOriginalRoute(), id, answer));
+ } else if (toString != null) {
+ builder.getAdviceWithTasks().add(AdviceWithTasks.afterByToString(builder.getOriginalRoute(), toString, answer));
+ }
+ return answer;
+ }
+
+}
diff --git a/camel-core/src/main/java/org/apache/camel/builder/AdviceWithRouteBuilder.java b/camel-core/src/main/java/org/apache/camel/builder/AdviceWithRouteBuilder.java
index 7d77bbb81bdb8..04d78d2267912 100644
--- a/camel-core/src/main/java/org/apache/camel/builder/AdviceWithRouteBuilder.java
+++ b/camel-core/src/main/java/org/apache/camel/builder/AdviceWithRouteBuilder.java
@@ -16,18 +16,56 @@
*/
package org.apache.camel.builder;
+import java.util.ArrayList;
+import java.util.List;
+
import org.apache.camel.impl.InterceptSendToMockEndpointStrategy;
+import org.apache.camel.model.RouteDefinition;
+import org.apache.camel.util.ObjectHelper;
/**
- * A {@link RouteBuilder} which has extended features when using
- * {@link org.apache.camel.model.RouteDefinition#adviceWith(org.apache.camel.CamelContext, RouteBuilder) adviceWith}.
+ * A {@link RouteBuilder} which has extended capabilities when using
+ * the <a href="http://camel.apache.org/advicewith.html">advice with</a> feature.
*
- * @version
+ * @see org.apache.camel.model.RouteDefinition#adviceWith(org.apache.camel.CamelContext, RouteBuilder)
*/
public abstract class AdviceWithRouteBuilder extends RouteBuilder {
+ private RouteDefinition originalRoute;
+ private final List<AdviceWithTask> adviceWithTasks = new ArrayList<AdviceWithTask>();
+
+ /**
+ * Sets the original route which we advice.
+ *
+ * @param originalRoute the original route we advice.
+ */
+ public void setOriginalRoute(RouteDefinition originalRoute) {
+ this.originalRoute = originalRoute;
+ }
+
+ /**
+ * Gets the original route we advice.
+ *
+ * @return the original route.
+ */
+ public RouteDefinition getOriginalRoute() {
+ return originalRoute;
+ }
+
+ /**
+ * Gets a list of additional tasks to execute after the {@link #configure()} method has been executed
+ * during the advice process.
+ *
+ * @return a list of additional {@link AdviceWithTask} tasks to be executed during the advice process.
+ */
+ public List<AdviceWithTask> getAdviceWithTasks() {
+ return adviceWithTasks;
+ }
+
/**
* Mock all endpoints in the route.
+ *
+ * @throws Exception can be thrown if error occurred
*/
public void mockEndpoints() throws Exception {
getContext().removeEndpoints("*");
@@ -37,7 +75,8 @@ public void mockEndpoints() throws Exception {
/**
* Mock all endpoints matching the given pattern.
*
- * @param pattern the pattern.
+ * @param pattern the pattern.
+ * @throws Exception can be thrown if error occurred
* @see org.apache.camel.util.EndpointHelper#matchEndpoint(String, String)
*/
public void mockEndpoints(String pattern) throws Exception {
@@ -45,4 +84,34 @@ public void mockEndpoints(String pattern) throws Exception {
getContext().addRegisterEndpointCallback(new InterceptSendToMockEndpointStrategy(pattern));
}
+ /**
+ * Advices by matching id of the nodes in the route.
+ * <p/>
+ * Uses the {@link org.apache.camel.util.EndpointHelper#matchPattern(String, String)} matching algorithm.
+ *
+ * @param pattern the pattern
+ * @return the builder
+ * @see org.apache.camel.util.EndpointHelper#matchPattern(String, String)
+ */
+ public AdviceWithBuilder adviceById(String pattern) {
+ ObjectHelper.notNull(originalRoute, "originalRoute", this);
+
+ return new AdviceWithBuilder(this, pattern, null);
+ }
+
+ /**
+ * Advices by matching the to string representation of the nodes in the route.
+ * <p/>
+ * Uses the {@link org.apache.camel.util.EndpointHelper#matchPattern(String, String)} matching algorithm.
+ *
+ * @param pattern the pattern
+ * @return the builder
+ * @see org.apache.camel.util.EndpointHelper#matchPattern(String, String)
+ */
+ public AdviceWithBuilder adviceByToString(String pattern) {
+ ObjectHelper.notNull(originalRoute, "originalRoute", this);
+
+ return new AdviceWithBuilder(this, null, pattern);
+ }
+
}
diff --git a/camel-core/src/main/java/org/apache/camel/builder/AdviceWithTask.java b/camel-core/src/main/java/org/apache/camel/builder/AdviceWithTask.java
new file mode 100644
index 0000000000000..7c29422d8ac6c
--- /dev/null
+++ b/camel-core/src/main/java/org/apache/camel/builder/AdviceWithTask.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.builder;
+
+/**
+ * Task or command being executed when using the advice with feature.
+ */
+public interface AdviceWithTask {
+
+ /**
+ * The task to execute
+ *
+ * @throws Exception is thrown if error during executing the task, or invalid input.
+ */
+ void task() throws Exception;
+
+}
diff --git a/camel-core/src/main/java/org/apache/camel/builder/AdviceWithTasks.java b/camel-core/src/main/java/org/apache/camel/builder/AdviceWithTasks.java
new file mode 100644
index 0000000000000..4a568d1dbf5b1
--- /dev/null
+++ b/camel-core/src/main/java/org/apache/camel/builder/AdviceWithTasks.java
@@ -0,0 +1,238 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.builder;
+
+import java.util.Iterator;
+
+import org.apache.camel.model.ProcessorDefinition;
+import org.apache.camel.model.ProcessorDefinitionHelper;
+import org.apache.camel.model.RouteDefinition;
+import org.apache.camel.util.EndpointHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * {@link AdviceWithTask} tasks which are used by the {@link AdviceWithRouteBuilder}.
+ */
+public final class AdviceWithTasks {
+
+ private static final Logger LOG = LoggerFactory.getLogger(AdviceWithTasks.class);
+
+ private AdviceWithTasks() {
+ // utility class
+ }
+
+ /**
+ * Match by is used for pluggable match by logic.
+ */
+ private interface MatchBy {
+
+ String getId();
+
+ boolean match(ProcessorDefinition processor);
+ }
+
+ /**
+ * Will match by id of the processor.
+ */
+ private static final class MatchById implements MatchBy {
+
+ private final String id;
+
+ private MatchById(String id) {
+ this.id = id;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public boolean match(ProcessorDefinition processor) {
+ return EndpointHelper.matchPattern(processor.getId(), id);
+ }
+ }
+
+ /**
+ * Will match by the to string representation of the processor.
+ */
+ private static final class MatchByToString implements MatchBy {
+
+ private final String toString;
+
+ private MatchByToString(String toString) {
+ this.toString = toString;
+ }
+
+ public String getId() {
+ return toString;
+ }
+
+ public boolean match(ProcessorDefinition processor) {
+ return EndpointHelper.matchPattern(processor.toString(), toString);
+ }
+ }
+
+
+ public static AdviceWithTask replaceByToString(final RouteDefinition route, final String toString, final ProcessorDefinition replace) {
+ return doReplace(route, new MatchByToString(toString), replace);
+ }
+
+ public static AdviceWithTask replaceById(final RouteDefinition route, final String id, final ProcessorDefinition replace) {
+ return doReplace(route, new MatchById(id), replace);
+ }
+
+ @SuppressWarnings("unchecked")
+ private static AdviceWithTask doReplace(final RouteDefinition route, final MatchBy matchBy, final ProcessorDefinition replace) {
+ return new AdviceWithTask() {
+ public void task() throws Exception {
+ boolean match = false;
+ Iterator<ProcessorDefinition> it = ProcessorDefinitionHelper.filterTypeInOutputs(route.getOutputs(), ProcessorDefinition.class);
+ while (it.hasNext()) {
+ ProcessorDefinition output = it.next();
+ if (matchBy.match(output)) {
+ ProcessorDefinition parent = output.getParent();
+ if (parent != null) {
+ int index = parent.getOutputs().indexOf(output);
+ if (index != -1) {
+ match = true;
+ parent.getOutputs().add(index + 1, replace);
+ Object old = parent.getOutputs().remove(index);
+ LOG.info("AdviceWith (" + matchBy.getId() + ") : [" + old + "] --> replace [" + replace + "]");
+ }
+ }
+ }
+ }
+
+ if (!match) {
+ throw new IllegalArgumentException("There are no outputs which matches: " + matchBy.getId() + " in the route: " + route);
+ }
+ }
+ };
+ }
+
+ public static AdviceWithTask removeByToString(final RouteDefinition route, final String toString) {
+ return doRemove(route, new MatchByToString(toString));
+ }
+
+ public static AdviceWithTask removeById(final RouteDefinition route, final String id) {
+ return doRemove(route, new MatchById(id));
+ }
+
+ @SuppressWarnings("unchecked")
+ private static AdviceWithTask doRemove(final RouteDefinition route, final MatchBy matchBy) {
+ return new AdviceWithTask() {
+ public void task() throws Exception {
+ boolean match = false;
+ Iterator<ProcessorDefinition> it = ProcessorDefinitionHelper.filterTypeInOutputs(route.getOutputs(), ProcessorDefinition.class);
+ while (it.hasNext()) {
+ ProcessorDefinition output = it.next();
+ if (matchBy.match(output)) {
+ ProcessorDefinition parent = output.getParent();
+ if (parent != null) {
+ int index = parent.getOutputs().indexOf(output);
+ if (index != -1) {
+ match = true;
+ Object old = parent.getOutputs().remove(index);
+ LOG.info("AdviceWith (" + matchBy.getId() + ") : [" + old + "] --> remove");
+ }
+ }
+ }
+ }
+
+ if (!match) {
+ throw new IllegalArgumentException("There are no outputs which matches: " + matchBy.getId() + " in the route: " + route);
+ }
+ }
+ };
+ }
+
+ public static AdviceWithTask beforeByToString(final RouteDefinition route, final String toString, final ProcessorDefinition before) {
+ return doBefore(route, new MatchByToString(toString), before);
+ }
+
+ public static AdviceWithTask beforeById(final RouteDefinition route, final String id, final ProcessorDefinition before) {
+ return doBefore(route, new MatchById(id), before);
+ }
+
+ @SuppressWarnings("unchecked")
+ private static AdviceWithTask doBefore(final RouteDefinition route, final MatchBy matchBy, final ProcessorDefinition before) {
+ return new AdviceWithTask() {
+ public void task() throws Exception {
+ boolean match = false;
+ Iterator<ProcessorDefinition> it = ProcessorDefinitionHelper.filterTypeInOutputs(route.getOutputs(), ProcessorDefinition.class);
+ while (it.hasNext()) {
+ ProcessorDefinition output = it.next();
+ if (matchBy.match(output)) {
+ ProcessorDefinition parent = output.getParent();
+ if (parent != null) {
+ int index = parent.getOutputs().indexOf(output);
+ if (index != -1) {
+ match = true;
+ Object existing = parent.getOutputs().get(index);
+ parent.getOutputs().add(index, before);
+ LOG.info("AdviceWith (" + matchBy.getId() + ") : [" + existing + "] --> before [" + before + "]");
+ }
+ }
+ }
+ }
+
+ if (!match) {
+ throw new IllegalArgumentException("There are no outputs which matches: " + matchBy.getId() + " in the route: " + route);
+ }
+ }
+ };
+ }
+
+ public static AdviceWithTask afterByToString(final RouteDefinition route, final String toString, final ProcessorDefinition after) {
+ return doAfter(route, new MatchByToString(toString), after);
+ }
+
+ public static AdviceWithTask afterById(final RouteDefinition route, final String id, final ProcessorDefinition after) {
+ return doAfter(route, new MatchById(id), after);
+ }
+
+ @SuppressWarnings("unchecked")
+ private static AdviceWithTask doAfter(final RouteDefinition route, final MatchBy matchBy, final ProcessorDefinition after) {
+ return new AdviceWithTask() {
+ public void task() throws Exception {
+ boolean match = false;
+ Iterator<ProcessorDefinition> it = ProcessorDefinitionHelper.filterTypeInOutputs(route.getOutputs(), ProcessorDefinition.class);
+ while (it.hasNext()) {
+ ProcessorDefinition output = it.next();
+ if (matchBy.match(output)) {
+
+ ProcessorDefinition parent = output.getParent();
+ if (parent != null) {
+ int index = parent.getOutputs().indexOf(output);
+ if (index != -1) {
+ match = true;
+ Object existing = parent.getOutputs().get(index);
+ parent.getOutputs().add(index + 1, after);
+ LOG.info("AdviceWith (" + matchBy.getId() + ") : [" + existing + "] --> after [" + after + "]");
+ }
+ }
+ }
+ }
+
+ if (!match) {
+ throw new IllegalArgumentException("There are no outputs which matches: " + matchBy.getId() + " in the route: " + route);
+ }
+ }
+ };
+ }
+
+}
diff --git a/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java b/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java
index dba8330ba815a..b34bb6570d046 100644
--- a/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java
+++ b/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java
@@ -30,6 +30,9 @@
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.model.RoutesDefinition;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
/**
* A <a href="http://camel.apache.org/dsl.html">Java DSL</a> which is
* used to build {@link org.apache.camel.impl.DefaultRoute} instances in a {@link CamelContext} for smart routing.
@@ -37,6 +40,7 @@
* @version
*/
public abstract class RouteBuilder extends BuilderSupport implements RoutesBuilder {
+ protected Logger log = LoggerFactory.getLogger(getClass());
private AtomicBoolean initialized = new AtomicBoolean(false);
private RoutesDefinition routeCollection = new RoutesDefinition();
@@ -343,6 +347,7 @@ public RoutesDefinition getRouteCollection() {
/**
* Factory method
+ *
* @return the CamelContext
*/
protected CamelContext createContainer() {
@@ -356,7 +361,7 @@ protected void configureRoute(RouteDefinition route) {
/**
* Adds a collection of routes to this context
*
- * @param routes
+ * @param routes the routes
* @throws Exception if the routes could not be created for whatever reason
* @deprecated use {@link #includeRoutes(org.apache.camel.RoutesBuilder) includeRoutes} instead.
*/
diff --git a/camel-core/src/main/java/org/apache/camel/model/OutputDefinition.java b/camel-core/src/main/java/org/apache/camel/model/OutputDefinition.java
index 0c8184ae0d1a6..d6aca7071a371 100644
--- a/camel-core/src/main/java/org/apache/camel/model/OutputDefinition.java
+++ b/camel-core/src/main/java/org/apache/camel/model/OutputDefinition.java
@@ -47,4 +47,14 @@ public void setOutputs(List<ProcessorDefinition> outputs) {
}
}
}
+
+ @Override
+ public String getShortName() {
+ return "output";
+ }
+
+ @Override
+ public String toString() {
+ return getShortName() + " -> [" + outputs + "]";
+ }
}
diff --git a/camel-core/src/main/java/org/apache/camel/model/RouteDefinition.java b/camel-core/src/main/java/org/apache/camel/model/RouteDefinition.java
index 879552b7ce04e..ed995399f601e 100644
--- a/camel-core/src/main/java/org/apache/camel/model/RouteDefinition.java
+++ b/camel-core/src/main/java/org/apache/camel/model/RouteDefinition.java
@@ -37,6 +37,8 @@
import org.apache.camel.ServiceStatus;
import org.apache.camel.ShutdownRoute;
import org.apache.camel.ShutdownRunningTask;
+import org.apache.camel.builder.AdviceWithRouteBuilder;
+import org.apache.camel.builder.AdviceWithTask;
import org.apache.camel.builder.ErrorHandlerBuilder;
import org.apache.camel.builder.ErrorHandlerBuilderRef;
import org.apache.camel.builder.RouteBuilder;
@@ -179,6 +181,10 @@ public Endpoint resolveEndpoint(CamelContext camelContext, String uri) throws No
/**
* Advices this route with the route builder.
* <p/>
+ * You can use a regular {@link RouteBuilder} but the specialized {@link org.apache.camel.builder.AdviceWithRouteBuilder}
+ * has additional features when using the <a href="http://camel.apache.org/advicewith.html">advice with</a> feature.
+ * We therefore suggest you to use the {@link org.apache.camel.builder.AdviceWithRouteBuilder}.
+ * <p/>
* The advice process will add the interceptors, on exceptions, on completions etc. configured
* from the route builder to this route.
* <p/>
@@ -190,14 +196,29 @@ public Endpoint resolveEndpoint(CamelContext camelContext, String uri) throws No
* @param builder the route builder
* @return a new route which is this route merged with the route builder
* @throws Exception can be thrown from the route builder
+ * @see AdviceWithRouteBuilder
*/
public RouteDefinition adviceWith(CamelContext camelContext, RouteBuilder builder) throws Exception {
ObjectHelper.notNull(camelContext, "CamelContext");
ObjectHelper.notNull(builder, "RouteBuilder");
+ if (log.isDebugEnabled()) {
+ log.debug("AdviceWith route before: " + this);
+ }
+
+ // inject this route into the advice route builder so it can access this route
+ // and offer features to manipulate the route directly
+ if (builder instanceof AdviceWithRouteBuilder) {
+ ((AdviceWithRouteBuilder) builder).setOriginalRoute(this);
+ }
+
// configure and prepare the routes from the builder
RoutesDefinition routes = builder.configureRoutes(camelContext);
+ if (log.isDebugEnabled()) {
+ log.debug("AdviceWith routes: " + routes);
+ }
+
// we can only advice with a route builder without any routes
if (!routes.getRoutes().isEmpty()) {
throw new IllegalArgumentException("You can only advice from a RouteBuilder which has no existing routes."
@@ -211,12 +232,23 @@ public RouteDefinition adviceWith(CamelContext camelContext, RouteBuilder builde
// stop and remove this existing route
camelContext.removeRouteDefinition(this);
+ // any advice with tasks we should execute first?
+ if (builder instanceof AdviceWithRouteBuilder) {
+ List<AdviceWithTask> tasks = ((AdviceWithRouteBuilder) builder).getAdviceWithTasks();
+ for (AdviceWithTask task : tasks) {
+ task.task();
+ }
+ }
+
// now merge which also ensures that interceptors and the likes get mixed in correctly as well
RouteDefinition merged = routes.route(this);
// add the new merged route
camelContext.getRouteDefinitions().add(0, merged);
+ // log the merged route at info level to make it easier to end users to spot any mistakes they may have made
+ log.info("AdviceWith route after: " + merged);
+
// and start it
camelContext.startRoute(merged);
return merged;
diff --git a/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTasksMatchTest.java b/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTasksMatchTest.java
new file mode 100644
index 0000000000000..5cb4fe212267f
--- /dev/null
+++ b/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTasksMatchTest.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.processor.interceptor;
+
+import org.apache.camel.ContextTestSupport;
+import org.apache.camel.builder.AdviceWithRouteBuilder;
+import org.apache.camel.builder.RouteBuilder;
+
+/**
+ * Advice with match multiple ids test
+ */
+public class AdviceWithTasksMatchTest extends ContextTestSupport {
+
+ public void testReplaceMultipleIds() throws Exception {
+ context.getRouteDefinitions().get(0).adviceWith(context, new AdviceWithRouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ // replace all gold id's with the following route path
+ adviceById("gold*").replace().multicast().to("mock:a").to("mock:b");
+ }
+ });
+
+ getMockEndpoint("mock:foo").expectedMessageCount(0);
+ getMockEndpoint("mock:bar").expectedMessageCount(0);
+ getMockEndpoint("mock:a").expectedMessageCount(2);
+ getMockEndpoint("mock:b").expectedMessageCount(2);
+ getMockEndpoint("mock:result").expectedMessageCount(1);
+
+ template.sendBody("direct:start", "Hello World");
+
+ assertMockEndpointsSatisfied();
+ }
+
+
+ @Override
+ protected RouteBuilder createRouteBuilder() throws Exception {
+ return new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ from("direct:start")
+ .to("mock:foo").id("gold-1")
+ .to("mock:bar").id("gold-2")
+ .to("mock:result").id("silver-1");
+ }
+ };
+ }
+}
\ No newline at end of file
diff --git a/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTasksTest.java b/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTasksTest.java
new file mode 100644
index 0000000000000..7545742d743f3
--- /dev/null
+++ b/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTasksTest.java
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.processor.interceptor;
+
+import org.apache.camel.ContextTestSupport;
+import org.apache.camel.builder.AdviceWithRouteBuilder;
+import org.apache.camel.builder.RouteBuilder;
+
+/**
+ * Advice with tests
+ */
+public class AdviceWithTasksTest extends ContextTestSupport {
+
+ public void testUnknownId() throws Exception {
+ try {
+ context.getRouteDefinitions().get(0).adviceWith(context, new AdviceWithRouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ adviceById("xxx").replace().to("mock:xxx");
+ }
+ });
+ fail("Should hve thrown exception");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage(), e.getMessage().startsWith("There are no outputs which matches: xxx in the route"));
+ }
+ }
+
+ public void testReplace() throws Exception {
+ // START SNIPPET: e1
+ context.getRouteDefinitions().get(0).adviceWith(context, new AdviceWithRouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ // advice the node in the route which has id = bar
+ // and replace it with the following route path
+ adviceById("bar").replace().multicast().to("mock:a").to("mock:b");
+ }
+ });
+ // END SNIPPET: e1
+
+ getMockEndpoint("mock:foo").expectedMessageCount(1);
+ getMockEndpoint("mock:bar").expectedMessageCount(0);
+ getMockEndpoint("mock:a").expectedMessageCount(1);
+ getMockEndpoint("mock:b").expectedMessageCount(1);
+ getMockEndpoint("mock:result").expectedMessageCount(1);
+
+ template.sendBody("direct:start", "Hello World");
+
+ assertMockEndpointsSatisfied();
+ }
+
+ public void testRemove() throws Exception {
+ // START SNIPPET: e2
+ context.getRouteDefinitions().get(0).adviceWith(context, new AdviceWithRouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ // advice the node in the route which has id = bar and remove it
+ adviceById("bar").remove();
+ }
+ });
+ // END SNIPPET: e2
+
+ getMockEndpoint("mock:foo").expectedMessageCount(1);
+ getMockEndpoint("mock:result").expectedMessageCount(1);
+
+ template.sendBody("direct:start", "Hello World");
+
+ assertMockEndpointsSatisfied();
+
+ assertFalse("Should not have removed id", context.hasEndpoint("mock:bar") == null);
+ }
+
+ public void testBefore() throws Exception {
+ // START SNIPPET: e3
+ context.getRouteDefinitions().get(0).adviceWith(context, new AdviceWithRouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ // advice the node in the route which has id = bar
+ // and insert the following route path before the adviced node
+ adviceById("bar").before().to("mock:a").transform(constant("Bye World"));
+ }
+ });
+ // END SNIPPET: e3
+
+ getMockEndpoint("mock:foo").expectedBodiesReceived("Hello World");
+ getMockEndpoint("mock:a").expectedBodiesReceived("Hello World");
+ getMockEndpoint("mock:bar").expectedBodiesReceived("Bye World");
+ getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
+
+ template.sendBody("direct:start", "Hello World");
+
+ assertMockEndpointsSatisfied();
+ }
+
+ public void testAfter() throws Exception {
+ // START SNIPPET: e4
+ context.getRouteDefinitions().get(0).adviceWith(context, new AdviceWithRouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ // advice the node in the route which has id = bar
+ // and insert the following route path after the advice node
+ adviceById("bar").after().to("mock:a").transform(constant("Bye World"));
+ }
+ });
+ // END SNIPPET: e4
+
+ getMockEndpoint("mock:foo").expectedBodiesReceived("Hello World");
+ getMockEndpoint("mock:a").expectedBodiesReceived("Hello World");
+ getMockEndpoint("mock:bar").expectedBodiesReceived("Hello World");
+ getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
+
+ template.sendBody("direct:start", "Hello World");
+
+ assertMockEndpointsSatisfied();
+ }
+
+ @Override
+ protected RouteBuilder createRouteBuilder() throws Exception {
+ return new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ from("direct:start")
+ .to("mock:foo")
+ .to("mock:bar").id("bar")
+ .to("mock:result");
+ }
+ };
+ }
+}
\ No newline at end of file
diff --git a/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTasksToStringPatternTest.java b/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTasksToStringPatternTest.java
new file mode 100644
index 0000000000000..f340167aa32e5
--- /dev/null
+++ b/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithTasksToStringPatternTest.java
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.processor.interceptor;
+
+import org.apache.camel.ContextTestSupport;
+import org.apache.camel.builder.AdviceWithRouteBuilder;
+import org.apache.camel.builder.RouteBuilder;
+
+/**
+ * Advice with using to string matching
+ */
+public class AdviceWithTasksToStringPatternTest extends ContextTestSupport {
+
+ public void testUnknownId() throws Exception {
+ try {
+ context.getRouteDefinitions().get(0).adviceWith(context, new AdviceWithRouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ adviceByToString("xxx").replace().to("mock:xxx");
+ }
+ });
+ fail("Should hve thrown exception");
+ } catch (IllegalArgumentException e) {
+ assertTrue(e.getMessage(), e.getMessage().startsWith("There are no outputs which matches: xxx in the route"));
+ }
+ }
+
+ public void testReplace() throws Exception {
+ // START SNIPPET: e1
+ context.getRouteDefinitions().get(0).adviceWith(context, new AdviceWithRouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ // advice nodes in the route which has foo anywhere in their to string representation
+ // and replace them with the following route path
+ adviceByToString(".*foo.*").replace().multicast().to("mock:a").to("mock:b");
+ }
+ });
+ // END SNIPPET: e1
+
+ getMockEndpoint("mock:foo").expectedMessageCount(0);
+ getMockEndpoint("mock:a").expectedMessageCount(1);
+ getMockEndpoint("mock:b").expectedMessageCount(1);
+ getMockEndpoint("mock:bar").expectedMessageCount(1);
+ getMockEndpoint("mock:result").expectedMessageCount(1);
+
+ template.sendBody("direct:start", "Hello World");
+
+ assertMockEndpointsSatisfied();
+ }
+
+ public void testRemove() throws Exception {
+ context.getRouteDefinitions().get(0).adviceWith(context, new AdviceWithRouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ adviceByToString(".*bar.*").remove();
+ }
+ });
+
+ getMockEndpoint("mock:foo").expectedMessageCount(1);
+ getMockEndpoint("mock:result").expectedMessageCount(1);
+
+ template.sendBody("direct:start", "Hello World");
+
+ assertMockEndpointsSatisfied();
+
+ assertFalse("Should not have removed id", context.hasEndpoint("mock:bar") == null);
+ }
+
+ public void testBefore() throws Exception {
+ context.getRouteDefinitions().get(0).adviceWith(context, new AdviceWithRouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ adviceByToString(".*bar.*").before().to("mock:a").transform(constant("Bye World"));
+ }
+ });
+
+ getMockEndpoint("mock:foo").expectedBodiesReceived("Hello World");
+ getMockEndpoint("mock:a").expectedBodiesReceived("Hello World");
+ getMockEndpoint("mock:bar").expectedBodiesReceived("Bye World");
+ getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
+
+ template.sendBody("direct:start", "Hello World");
+
+ assertMockEndpointsSatisfied();
+ }
+
+ public void testAfter() throws Exception {
+ context.getRouteDefinitions().get(0).adviceWith(context, new AdviceWithRouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ adviceByToString(".*bar.*").after().to("mock:a").transform(constant("Bye World"));
+ }
+ });
+
+ getMockEndpoint("mock:foo").expectedBodiesReceived("Hello World");
+ getMockEndpoint("mock:a").expectedBodiesReceived("Hello World");
+ getMockEndpoint("mock:bar").expectedBodiesReceived("Hello World");
+ getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
+
+ template.sendBody("direct:start", "Hello World");
+
+ assertMockEndpointsSatisfied();
+ }
+
+ @Override
+ protected RouteBuilder createRouteBuilder() throws Exception {
+ return new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+ from("direct:start")
+ .to("mock:foo")
+ .to("mock:bar")
+ .to("mock:result");
+ }
+ };
+ }
+}
\ No newline at end of file
|
cb51d3c5762e6f2808f9ab358943b3dc85e9bd95
|
elasticsearch
|
Sorting on _score in the URI format is reversed,- closes -1191.--
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java
index 3cd3762651c9a..8ca9d706d4768 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java
@@ -64,8 +64,8 @@ public FieldSortBuilder(String fieldName) {
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(fieldName);
- if (order == SortOrder.DESC) {
- builder.field("reverse", true);
+ if (order != null) {
+ builder.field("order", order.toString());
}
if (missing != null) {
builder.field("missing", missing);
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/SortOrder.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/SortOrder.java
index 750cb601123f8..757f0fbc939c5 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/SortOrder.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/SortOrder.java
@@ -28,9 +28,17 @@ public enum SortOrder {
/**
* Ascending order.
*/
- ASC,
+ ASC {
+ @Override public String toString() {
+ return "asc";
+ }
+ },
/**
* Descending order.
*/
- DESC
+ DESC {
+ @Override public String toString() {
+ return "desc";
+ }
+ }
}
diff --git a/modules/test/integration/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java b/modules/test/integration/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java
index 37674b53a3580..8bc68550a8d49 100644
--- a/modules/test/integration/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java
+++ b/modules/test/integration/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java
@@ -29,6 +29,7 @@
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.integration.AbstractNodesTests;
+import org.hamcrest.Matchers;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@@ -112,6 +113,41 @@ protected Client getClient() {
}
}
+ @Test public void testScoreSortDirection() throws Exception {
+ try {
+ client.admin().indices().prepareDelete("test").execute().actionGet();
+ } catch (Exception e) {
+ // ignore
+ }
+ client.admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("number_of_shards", 1)).execute().actionGet();
+ client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
+
+ client.prepareIndex("test", "type", "1").setSource("field", 2).execute().actionGet();
+ client.prepareIndex("test", "type", "2").setSource("field", 1).execute().actionGet();
+ client.prepareIndex("test", "type", "3").setSource("field", 0).execute().actionGet();
+
+ client.admin().indices().prepareRefresh().execute().actionGet();
+
+ SearchResponse searchResponse = client.prepareSearch("test").setQuery(customScoreQuery(matchAllQuery()).script("_source.field")).execute().actionGet();
+ assertThat(searchResponse.hits().getAt(0).getId(), equalTo("1"));
+ assertThat(searchResponse.hits().getAt(1).score(), Matchers.lessThan(searchResponse.hits().getAt(0).score()));
+ assertThat(searchResponse.hits().getAt(1).getId(), equalTo("2"));
+ assertThat(searchResponse.hits().getAt(2).score(), Matchers.lessThan(searchResponse.hits().getAt(1).score()));
+ assertThat(searchResponse.hits().getAt(2).getId(), equalTo("3"));
+
+ searchResponse = client.prepareSearch("test").setQuery(customScoreQuery(matchAllQuery()).script("_source.field")).addSort("_score", SortOrder.DESC).execute().actionGet();
+ assertThat(searchResponse.hits().getAt(0).getId(), equalTo("1"));
+ assertThat(searchResponse.hits().getAt(1).score(), Matchers.lessThan(searchResponse.hits().getAt(0).score()));
+ assertThat(searchResponse.hits().getAt(1).getId(), equalTo("2"));
+ assertThat(searchResponse.hits().getAt(2).score(), Matchers.lessThan(searchResponse.hits().getAt(1).score()));
+ assertThat(searchResponse.hits().getAt(2).getId(), equalTo("3"));
+
+ searchResponse = client.prepareSearch("test").setQuery(customScoreQuery(matchAllQuery()).script("_source.field")).addSort("_score", SortOrder.DESC).execute().actionGet();
+ assertThat(searchResponse.hits().getAt(2).getId(), equalTo("3"));
+ assertThat(searchResponse.hits().getAt(1).getId(), equalTo("2"));
+ assertThat(searchResponse.hits().getAt(0).getId(), equalTo("1"));
+ }
+
@Test public void testSimpleSortsSingleShard() throws Exception {
testSimpleSorts(1);
}
|
bc79855cf59458c38498b449f3b8ab99fa1191c9
|
kotlin
|
Minor: additional logging when failing to read- file in VirtualFileKotlinClass--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/kotlin/VirtualFileKotlinClass.java b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/kotlin/VirtualFileKotlinClass.java
index a63970f91bd2d..76305bd1cf1d3 100644
--- a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/kotlin/VirtualFileKotlinClass.java
+++ b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/kotlin/VirtualFileKotlinClass.java
@@ -16,6 +16,7 @@
package org.jetbrains.jet.lang.resolve.kotlin;
+import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vfs.VirtualFile;
import kotlin.Function0;
@@ -34,12 +35,12 @@
import org.jetbrains.jet.storage.StorageManager;
import org.jetbrains.jet.utils.UtilsPackage;
-import java.io.IOException;
-
import static org.jetbrains.asm4.ClassReader.*;
import static org.jetbrains.asm4.Opcodes.ASM4;
public class VirtualFileKotlinClass implements KotlinJvmBinaryClass {
+ private final static Logger LOG = Logger.getInstance(VirtualFileKotlinClass.class);
+
private final VirtualFile file;
private final NotNullLazyValue<JvmClassName> className;
private final NullableLazyValue<KotlinClassHeader> classHeader;
@@ -80,7 +81,8 @@ public void visit(int version, int access, String name, String signature, String
}
}, SKIP_CODE | SKIP_DEBUG | SKIP_FRAMES);
}
- catch (IOException e) {
+ catch (Throwable e) {
+ logFileReadingError(e);
throw UtilsPackage.rethrow(e);
}
return classNameRef.get();
@@ -112,7 +114,8 @@ public void visitEnd() {
}
}, SKIP_CODE | SKIP_DEBUG | SKIP_FRAMES);
}
- catch (IOException e) {
+ catch (Throwable e) {
+ logFileReadingError(e);
throw UtilsPackage.rethrow(e);
}
}
@@ -196,7 +199,8 @@ public void visitEnd() {
}
}, SKIP_CODE | SKIP_DEBUG | SKIP_FRAMES);
}
- catch (IOException e) {
+ catch (Throwable e) {
+ logFileReadingError(e);
throw UtilsPackage.rethrow(e);
}
}
@@ -221,4 +225,13 @@ public boolean equals(Object obj) {
public String toString() {
return getClass().getSimpleName() + ": " + file.toString();
}
+
+ private void logFileReadingError(@NotNull Throwable e) {
+ LOG.error(
+ "Could not read file: " + file.getPath() + "\n"
+ + "Size in bytes: " + file.getLength() + "\n"
+ + "File type: " + file.getFileType().getName(),
+ e
+ );
+ }
}
\ No newline at end of file
|
c9c816ed3fb54e7ecf12c2a4bc06014b4fe277b5
|
ReactiveX-RxJava
|
Refactor test to use CountDownLatch instead of- Thread.sleep--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/operators/OperationObserveOn.java b/rxjava-core/src/main/java/rx/operators/OperationObserveOn.java
index 4f94cda6d5..4a09797f85 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationObserveOn.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationObserveOn.java
@@ -15,12 +15,17 @@
*/
package rx.operators;
-import static org.mockito.Matchers.*;
+import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
-import org.junit.Test;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import org.junit.Test;
import org.mockito.InOrder;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
import rx.Observable;
import rx.Observer;
import rx.Scheduler;
@@ -66,7 +71,6 @@ public void testObserveOn() {
verify(observer, times(1)).onCompleted();
}
-
@Test
@SuppressWarnings("unchecked")
public void testOrdering() throws InterruptedException {
@@ -76,9 +80,21 @@ public void testOrdering() throws InterruptedException {
InOrder inOrder = inOrder(observer);
+ final CountDownLatch completedLatch = new CountDownLatch(1);
+ doAnswer(new Answer<Void>() {
+
+ @Override
+ public Void answer(InvocationOnMock invocation) throws Throwable {
+ completedLatch.countDown();
+ return null;
+ }
+ }).when(observer).onCompleted();
+
obs.observeOn(Schedulers.threadPoolForComputation()).subscribe(observer);
- Thread.sleep(500); // !!! not a true unit test
+ if (!completedLatch.await(1000, TimeUnit.MILLISECONDS)) {
+ fail("timed out waiting");
+ }
inOrder.verify(observer, times(1)).onNext("one");
inOrder.verify(observer, times(1)).onNext(null);
|
1cda4eacbf8e44d03ecdfb6f16ab523306677cbc
|
restlet-framework-java
|
- The Request-isConfidential() method has- been refactored to be supported by Message and Response as well.- The method Request-setConfidential() has been removed (back to- Restlet 1.0 state). Added Protocol-isConfidential() method to- support the new implementation which rely on Request-getProtocol().- Reported by Kevin Conaway.--
|
p
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/build/tmpl/text/changes.txt b/build/tmpl/text/changes.txt
index 3bb3740fe2..51208741e0 100644
--- a/build/tmpl/text/changes.txt
+++ b/build/tmpl/text/changes.txt
@@ -44,6 +44,12 @@ Changes log
"style" attributes are required.
- Added the ability the give a title to WadlApplication or
WadlResource instances. Suggested by Jérôme Bernard.
+ - The Request#isConfidential() method has been refactored to
+ be supported by Message and Response as well. The method
+ Request#setConfidential() has been removed (back to Restlet
+ 1.0 state). Added Protocol#isConfidential() method to support
+ the new implementation which rely on Request#getProtocol().
+ Reported by Kevin Conaway.
- Misc
- Updated JAX-RS API to version 1.0. The implementation of the
runtime environment is not fully finished yet. We are
diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/component/ChildClientDispatcher.java b/modules/com.noelios.restlet/src/com/noelios/restlet/component/ChildClientDispatcher.java
index 4239efb2cf..6d9f19d948 100644
--- a/modules/com.noelios.restlet/src/com/noelios/restlet/component/ChildClientDispatcher.java
+++ b/modules/com.noelios.restlet/src/com/noelios/restlet/component/ChildClientDispatcher.java
@@ -72,9 +72,6 @@ public void doHandle(Request request, Response response) {
final Protocol protocol = request.getProtocol();
if (protocol.equals(Protocol.RIAP)) {
- // Consider that the request is confidential
- request.setConfidential(true);
-
// Let's dispatch it
final LocalReference cr = new LocalReference(request
.getResourceRef());
diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/component/ComponentClientDispatcher.java b/modules/com.noelios.restlet/src/com/noelios/restlet/component/ComponentClientDispatcher.java
index fc66f6e836..d81f983b14 100644
--- a/modules/com.noelios.restlet/src/com/noelios/restlet/component/ComponentClientDispatcher.java
+++ b/modules/com.noelios.restlet/src/com/noelios/restlet/component/ComponentClientDispatcher.java
@@ -64,9 +64,6 @@ protected void doHandle(Request request, Response response) {
final Protocol protocol = request.getProtocol();
if (protocol.equals(Protocol.RIAP)) {
- // Consider that the request is confidential
- request.setConfidential(true);
-
// Let's dispatch it
final LocalReference cr = new LocalReference(request
.getResourceRef());
diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpRequest.java b/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpRequest.java
index ab6f7fa405..120613d7f6 100644
--- a/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpRequest.java
+++ b/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpRequest.java
@@ -103,14 +103,6 @@ public HttpRequest(Context context, HttpServerCall httpCall) {
// Set the properties
setMethod(Method.valueOf(httpCall.getMethod()));
- if (getHttpCall().isConfidential()) {
- setConfidential(true);
- } else {
- // We don't want to autocreate the security data just for this
- // information, because that will by the default value of this
- // property if read by someone.
- }
-
// Set the host reference
final StringBuilder sb = new StringBuilder();
sb.append(httpCall.getProtocol().getSchemeName()).append("://");
diff --git a/modules/org.restlet/src/org/restlet/data/Message.java b/modules/org.restlet/src/org/restlet/data/Message.java
index d5ba7847fc..72fdfcd15e 100644
--- a/modules/org.restlet/src/org/restlet/data/Message.java
+++ b/modules/org.restlet/src/org/restlet/data/Message.java
@@ -78,49 +78,50 @@ public Message(Representation entity) {
this.saxRepresentation = null;
}
-/**
- * Returns the modifiable map of attributes that can be used by developers
- * to save information relative to the message. Creates a new instance if no
- * one has been set. This is an easier alternative to the creation of a
- * wrapper instance around the whole message.<br>
- * <br>
- *
- * In addition, this map is a shared space between the developer and the
- * connectors. In this case, it is used to exchange information that is not
- * uniform across all protocols and couldn't therefore be directly included
- * in the API. For this purpose, all attribute names starting with
- * "org.restlet" are reserved. Currently the following attributes are used:
- * <table>
- * <tr>
- * <th>Attribute name</th>
- * <th>Class name</th>
- * <th>Description</th>
- * </tr>
- * <tr>
- * <td>org.restlet.http.headers</td>
- * <td>org.restlet.data.Form</td>
- * <td>Server HTTP connectors must provide all request headers and client
- * HTTP connectors must provide all response headers, exactly as they were
- * received. In addition, developers can also use this attribute to specify
- * <b>non-standard</b> headers that should be added to the request or to
- * the response. </td>
- * </tr>
- * <tr>
- * <td>org.restlet.https.clientCertificates</td>
- * <td>List<java.security.cert.Certificate></td>
- * <td>For requests received via a secure connector, indicates the ordered
- * list of client certificates, if they are available and accessible.</td>
- * </tr>
- * </table><br>
- * Most of the standard HTTP headers are directly supported via the Restlet
- * API. Thus, adding such HTTP headers is forbidden because it could
- * conflict with the connector's internal behavior, limit portability or
- * prevent future optimizations. The other standard HTTP headers (that are
- * not supported) can be added as attributes via the
- * "org.restlet.http.headers" key.<br>
- *
- * @return The modifiable attributes map.
- */
+ /**
+ * Returns the modifiable map of attributes that can be used by developers
+ * to save information relative to the message. Creates a new instance if no
+ * one has been set. This is an easier alternative to the creation of a
+ * wrapper instance around the whole message.<br>
+ * <br>
+ *
+ * In addition, this map is a shared space between the developer and the
+ * connectors. In this case, it is used to exchange information that is not
+ * uniform across all protocols and couldn't therefore be directly included
+ * in the API. For this purpose, all attribute names starting with
+ * "org.restlet" are reserved. Currently the following attributes are used:
+ * <table>
+ * <tr>
+ * <th>Attribute name</th>
+ * <th>Class name</th>
+ * <th>Description</th>
+ * </tr>
+ * <tr>
+ * <td>org.restlet.http.headers</td>
+ * <td>org.restlet.data.Form</td>
+ * <td>Server HTTP connectors must provide all request headers and client
+ * HTTP connectors must provide all response headers, exactly as they were
+ * received. In addition, developers can also use this attribute to specify
+ * <b>non-standard</b> headers that should be added to the request or to the
+ * response.</td>
+ * </tr>
+ * <tr>
+ * <td>org.restlet.https.clientCertificates</td>
+ * <td>List<java.security.cert.Certificate></td>
+ * <td>For requests received via a secure connector, indicates the ordered
+ * list of client certificates, if they are available and accessible.</td>
+ * </tr>
+ * </table>
+ * <br>
+ * Most of the standard HTTP headers are directly supported via the Restlet
+ * API. Thus, adding such HTTP headers is forbidden because it could
+ * conflict with the connector's internal behavior, limit portability or
+ * prevent future optimizations. The other standard HTTP headers (that are
+ * not supported) can be added as attributes via the
+ * "org.restlet.http.headers" key.<br>
+ *
+ * @return The modifiable attributes map.
+ */
public Map<String, Object> getAttributes() {
if (this.attributes == null) {
this.attributes = new TreeMap<String, Object>();
@@ -234,6 +235,14 @@ public SaxRepresentation getEntityAsSax() {
return this.saxRepresentation;
}
+ /**
+ * Indicates if the message was or will be exchanged confidentially, for
+ * example via a SSL-secured connection.
+ *
+ * @return True if the message is confidential.
+ */
+ public abstract boolean isConfidential();
+
/**
* Indicates if a content is available and can be sent. Several conditions
* must be met: the content must exists and have some available data.
diff --git a/modules/org.restlet/src/org/restlet/data/Protocol.java b/modules/org.restlet/src/org/restlet/data/Protocol.java
index f21d806ff5..65669bbd3e 100644
--- a/modules/org.restlet/src/org/restlet/data/Protocol.java
+++ b/modules/org.restlet/src/org/restlet/data/Protocol.java
@@ -59,7 +59,7 @@ public final class Protocol extends Metadata {
* @see org.restlet.data.LocalReference
*/
public static final Protocol CLAP = new Protocol("clap", "CLAP",
- "Class Loader Access Protocol", UNKNOWN_PORT);
+ "Class Loader Access Protocol", UNKNOWN_PORT, true);
/**
* FILE is a standard scheme to access to representations stored in the file
@@ -72,7 +72,7 @@ public final class Protocol extends Metadata {
* @see org.restlet.data.LocalReference
*/
public static final Protocol FILE = new Protocol("file", "FILE",
- "Local File System Protocol", UNKNOWN_PORT);
+ "Local File System Protocol", UNKNOWN_PORT, true);
/** FTP protocol. */
public static final Protocol FTP = new Protocol("ftp", "FTP",
@@ -84,7 +84,7 @@ public final class Protocol extends Metadata {
/** HTTPS protocol (via SSL socket). */
public static final Protocol HTTPS = new Protocol("https", "HTTPS",
- "HyperText Transport Protocol (Secure)", 443);
+ "HyperText Transport Protocol (Secure)", 443, true);
/**
* JAR (Java ARchive) is a common scheme to access to representations inside
@@ -94,7 +94,7 @@ public final class Protocol extends Metadata {
* @see org.restlet.data.LocalReference
*/
public static final Protocol JAR = new Protocol("jar", "JAR",
- "Java ARchive", UNKNOWN_PORT);
+ "Java ARchive", UNKNOWN_PORT, true);
/** JDBC protocol. */
public static final Protocol JDBC = new Protocol("jdbc", "JDBC",
@@ -106,7 +106,7 @@ public final class Protocol extends Metadata {
/** POPS protocol (via SSL/TLS socket).. */
public static final Protocol POPS = new Protocol("pops", "POPS",
- "Post Office Protocol (Secure)", 995);
+ "Post Office Protocol (Secure)", 995, true);
/**
* RIAP (Restlet Internal Access Protocol) is a custom scheme to access
@@ -120,7 +120,7 @@ public final class Protocol extends Metadata {
* @see org.restlet.data.LocalReference
*/
public static final Protocol RIAP = new Protocol("riap", "RIAP",
- "Restlet Internal Access Protocol", UNKNOWN_PORT);
+ "Restlet Internal Access Protocol", UNKNOWN_PORT, true);
/** SMTP protocol. */
public static final Protocol SMTP = new Protocol("smtp", "SMTP",
@@ -135,15 +135,16 @@ public final class Protocol extends Metadata {
@Deprecated
public static final Protocol SMTP_STARTTLS = new Protocol("smtp",
"SMTP_STARTTLS",
- "Simple Mail Transfer Protocol (starting a TLS encryption)", 25);
+ "Simple Mail Transfer Protocol (starting a TLS encryption)", 25,
+ true);
/** SMTPS protocol (via SSL/TLS socket). */
public static final Protocol SMTPS = new Protocol("smtps", "SMTPS",
- "Simple Mail Transfer Protocol (Secure)", 465);
+ "Simple Mail Transfer Protocol (Secure)", 465, true);
/** Local Web Archive access protocol. */
public static final Protocol WAR = new Protocol("war", "WAR",
- "Web Archive Access Protocol", UNKNOWN_PORT);
+ "Web Archive Access Protocol", UNKNOWN_PORT, true);
/**
* Creates the protocol associated to a URI scheme name. If an existing
@@ -195,6 +196,9 @@ public static Protocol valueOf(final String name) {
return result;
}
+ /** The confidentiality. */
+ private volatile boolean confidential;
+
/** The default port if known or -1. */
private volatile int defaultPort;
@@ -226,9 +230,30 @@ public Protocol(final String schemeName) {
*/
public Protocol(final String schemeName, final String name,
final String description, int defaultPort) {
+ this(schemeName, name, description, defaultPort, false);
+ }
+
+ /**
+ * Constructor.
+ *
+ * @param schemeName
+ * The scheme name.
+ * @param name
+ * The unique name.
+ * @param description
+ * The description.
+ * @param defaultPort
+ * The default port.
+ * @param confidential
+ * The confidentiality.
+ */
+ public Protocol(final String schemeName, final String name,
+ final String description, int defaultPort,
+ final boolean confidential) {
super(name, description);
this.schemeName = schemeName;
this.defaultPort = defaultPort;
+ this.confidential = confidential;
}
/** {@inheritDoc} */
@@ -261,4 +286,15 @@ public String getSchemeName() {
public int hashCode() {
return (getName() == null) ? 0 : getName().toLowerCase().hashCode();
}
+
+ /**
+ * Indicates if the protocol guarantees the confidentially of the messages
+ * exchanged, for example via a SSL-secured connection.
+ *
+ * @return True if the protocol is confidential.
+ */
+ public boolean isConfidential() {
+ return this.confidential;
+ }
+
}
diff --git a/modules/org.restlet/src/org/restlet/data/Request.java b/modules/org.restlet/src/org/restlet/data/Request.java
index 9c81cbd5d5..300e05e0ab 100644
--- a/modules/org.restlet/src/org/restlet/data/Request.java
+++ b/modules/org.restlet/src/org/restlet/data/Request.java
@@ -109,9 +109,6 @@ public static Request getCurrent() {
/** The condition data. */
private volatile Conditions conditions;
- /** Indicates if the call came over a confidential channel. */
- private volatile boolean confidential;
-
/** The cookies provided by the client. */
private volatile Series<Cookie> cookies;
@@ -140,7 +137,6 @@ public static Request getCurrent() {
* Constructor.
*/
public Request() {
- this.confidential = false;
}
/**
@@ -387,13 +383,12 @@ public Reference getRootRef() {
}
/**
- * Indicates if the call came over a confidential channel such as an
- * SSL-secured connection.
- *
- * @return True if the call came over a confidential channel.
+ * Implemented based on the {@link Protocol#isConfidential()} method for the
+ * request's protocol returned by {@link #getProtocol()};
*/
+ @Override
public boolean isConfidential() {
- return this.confidential;
+ return (getProtocol() == null) ? false : getProtocol().isConfidential();
}
/**
@@ -444,17 +439,6 @@ public void setConditions(Conditions conditions) {
this.conditions = conditions;
}
- /**
- * Indicates if the call came over a confidential channel such as an
- * SSL-secured connection.
- *
- * @param confidential
- * True if the call came over a confidential channel.
- */
- public void setConfidential(boolean confidential) {
- this.confidential = confidential;
- }
-
/**
* Sets the cookies provided by the client.
*
diff --git a/modules/org.restlet/src/org/restlet/data/Response.java b/modules/org.restlet/src/org/restlet/data/Response.java
index cd34d0b5a1..b88c96c3ef 100644
--- a/modules/org.restlet/src/org/restlet/data/Response.java
+++ b/modules/org.restlet/src/org/restlet/data/Response.java
@@ -307,6 +307,11 @@ public Status getStatus() {
return this.status;
}
+ @Override
+ public boolean isConfidential() {
+ return getRequest().isConfidential();
+ }
+
/**
* Permanently redirects the client to a target URI. The client is expected
* to reuse the same method for the new request.
diff --git a/modules/org.restlet/src/org/restlet/util/WrapperRequest.java b/modules/org.restlet/src/org/restlet/util/WrapperRequest.java
index 039e608531..a8c52196d0 100644
--- a/modules/org.restlet/src/org/restlet/util/WrapperRequest.java
+++ b/modules/org.restlet/src/org/restlet/util/WrapperRequest.java
@@ -315,18 +315,6 @@ public void setChallengeResponse(ChallengeResponse response) {
getWrappedRequest().setChallengeResponse(response);
}
- /**
- * Indicates if the call came over a confidential channel such as an
- * SSL-secured connection.
- *
- * @param confidential
- * True if the call came over a confidential channel.
- */
- @Override
- public void setConfidential(boolean confidential) {
- getWrappedRequest().setConfidential(confidential);
- }
-
/**
* Sets the entity from a higher-level object. This object is converted to a
* representation using the Application's converter service. If you want to
diff --git a/modules/org.restlet/src/org/restlet/util/WrapperResponse.java b/modules/org.restlet/src/org/restlet/util/WrapperResponse.java
index f75306c2c1..1fc4fdf01d 100644
--- a/modules/org.restlet/src/org/restlet/util/WrapperResponse.java
+++ b/modules/org.restlet/src/org/restlet/util/WrapperResponse.java
@@ -301,6 +301,17 @@ protected Response getWrappedResponse() {
return this.wrappedResponse;
}
+ /**
+ * Indicates if the call came over a confidential channel such as an
+ * SSL-secured connection.
+ *
+ * @return True if the call came over a confidential channel.
+ */
+ @Override
+ public boolean isConfidential() {
+ return getWrappedResponse().isConfidential();
+ }
+
/**
* Indicates if a content is available and can be sent. Several conditions
* must be met: the content must exists and have some available data.
|
9e843df27fc2dd53c12327e9601206a7c677bd1f
|
orientdb
|
Improved auto-recognize of type in JSON reader--
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerJSON.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerJSON.java
index 7876b46308a..1122716eb39 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerJSON.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerJSON.java
@@ -62,6 +62,8 @@ public class ORecordSerializerJSON extends ORecordSerializerStringAbstract {
public static final char[] PARAMETER_SEPARATOR = new char[] { ':', ',' };
private static final Long MAX_INT = new Long(Integer.MAX_VALUE);
private static final Long MIN_INT = new Long(Integer.MIN_VALUE);
+ private static final Double MAX_FLOAT = new Double(Float.MAX_VALUE);
+ private static final Double MIN_FLOAT = new Double(Float.MIN_VALUE);
private SimpleDateFormat dateFormat = new SimpleDateFormat(DEF_DATE_FORMAT);
@@ -306,10 +308,21 @@ else if (iType == OType.LINKLIST)
// TRY TO AUTODETERMINE THE BEST TYPE
if (iFieldValue.charAt(0) == ORID.PREFIX && iFieldValue.contains(":"))
iType = OType.LINK;
- else if (OStringSerializerHelper.contains(iFieldValue, '.'))
- iType = OType.FLOAT;
- else {
+ else if (OStringSerializerHelper.contains(iFieldValue, '.')) {
+ // DECIMAL FORMAT: DETERMINE IF DOUBLE OR FLOAT
+ final Double v = new Double(OStringSerializerHelper.getStringContent(iFieldValue));
+ if (v.doubleValue() > 0) {
+ // POSITIVE NUMBER
+ if (v.compareTo(MAX_FLOAT) <= 0)
+ return v.floatValue();
+ } else if (v.compareTo(MIN_FLOAT) >= 0)
+ // NEGATIVE NUMBER
+ return v.floatValue();
+
+ return v;
+ } else {
final Long v = new Long(OStringSerializerHelper.getStringContent(iFieldValue));
+ // INTEGER FORMAT: DETERMINE IF DOUBLE OR FLOAT
if (v.longValue() > 0) {
// POSITIVE NUMBER
if (v.compareTo(MAX_INT) <= 0)
|
86335c9fb2587613d01c926308bbdb00db1fb562
|
hbase
|
HBASE-2365 Double-assignment around split--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@929856 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index aaf62492aa23..abb1a3e238e6 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -257,7 +257,10 @@ Release 0.21.0 - Unreleased
HBASE-2358 Store doReconstructionLog will fail if oldlogfile.log is empty
and won't load region (Cosmin Lehene via Stack)
HBASE-2370 saveVersion.sh doesnt properly grab the git revision
+ HBASE-2373 Remove confusing log message of how "BaseScanner GET got
+ different address/startcode than SCAN"
HBASE-2361 WALEdit broke replication scope
+ HBASE-2365 Double-assignment around split
IMPROVEMENTS
HBASE-1760 Cleanup TODOs in HTable
diff --git a/core/src/main/java/org/apache/hadoop/hbase/master/RegionManager.java b/core/src/main/java/org/apache/hadoop/hbase/master/RegionManager.java
index 636b4a95815f..24f9c14cced6 100644
--- a/core/src/main/java/org/apache/hadoop/hbase/master/RegionManager.java
+++ b/core/src/main/java/org/apache/hadoop/hbase/master/RegionManager.java
@@ -960,7 +960,7 @@ public void setUnassigned(HRegionInfo info, boolean force) {
regionsInTransition.put(info.getRegionNameAsString(), s);
}
}
- if (force || (!s.isPendingOpen() || !s.isOpen())) {
+ if (force || (!s.isPendingOpen() && !s.isOpen())) {
s.setUnassigned();
}
}
|
1d54f3f4837da65091711f7765511af1a0a7cbd5
|
orientdb
|
Fixed problem with remote connection to- distributed storage--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java b/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java
index 0e08aa1208d..e46f23c7f3c 100755
--- a/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java
+++ b/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java
@@ -1628,7 +1628,7 @@ protected void parseServerURLs() {
} else {
name = url.substring(url.lastIndexOf("/") + 1);
for (String host : url.substring(0, dbPos).split(ADDRESS_SEPARATOR))
- host = addHost(host);
+ addHost(host);
}
if (serverURLs.size() == 1 && OGlobalConfiguration.NETWORK_BINARY_DNS_LOADBALANCING_ENABLED.getValueAsBoolean()) {
@@ -1913,6 +1913,8 @@ public void updateClusterConfiguration(final byte[] obj) {
if (members != null) {
serverURLs.clear();
+ parseServerURLs();
+
for (ODocument m : members)
if (m != null && !serverURLs.contains((String) m.field("id"))) {
for (Map<String, Object> listener : ((Collection<Map<String, Object>>) m.field("listeners"))) {
|
be560b58da1c89e56091cd109f620d7eda3a24fb
|
ReactiveX-RxJava
|
Incorporate review suggestions.--- Changes finally0 to finallyDo.-- Removes unnecessary subscription-wrapping.-- Handle exceptions in onCompleted/onError-
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/Observable.java b/rxjava-core/src/main/java/rx/Observable.java
index 631e175751..fd14a42ce3 100644
--- a/rxjava-core/src/main/java/rx/Observable.java
+++ b/rxjava-core/src/main/java/rx/Observable.java
@@ -1192,8 +1192,8 @@ public static <T> Observable<T> concat(Observable<T>... source) {
* @return an Observable that emits the same objects, then calls the action.
* @see <a href="http://msdn.microsoft.com/en-us/library/hh212133(v=vs.103).aspx">MSDN: Observable.Finally Method</a>
*/
- public static <T> Observable<T> finally0(Observable source, Action0 action) {
- return _create(OperationFinally.finally0(source, action));
+ public static <T> Observable<T> finallyDo(Observable source, Action0 action) {
+ return _create(OperationFinally.finallyDo(source, action));
}
/**
@@ -2463,8 +2463,8 @@ public Observable<T> filter(Func1<T, Boolean> predicate) {
* @return an Observable that emits the same objects as this observable, then calls the action.
* @see <a href="http://msdn.microsoft.com/en-us/library/hh212133(v=vs.103).aspx">MSDN: Observable.Finally Method</a>
*/
- public Observable<T> finally0(Action0 action) {
- return _create(OperationFinally.finally0(this, action));
+ public Observable<T> finallyDo(Action0 action) {
+ return _create(OperationFinally.finallyDo(this, action));
}
/**
diff --git a/rxjava-core/src/main/java/rx/operators/OperationFinally.java b/rxjava-core/src/main/java/rx/operators/OperationFinally.java
index d90b0572a6..636a8e61ae 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationFinally.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationFinally.java
@@ -33,12 +33,10 @@ public final class OperationFinally {
/**
* Call a given action when a sequence completes (with or without an
* exception). The returned observable is exactly as threadsafe as the
- * source observable; in particular, any situation allowing the source to
- * call onComplete or onError multiple times allows the returned observable
- * to call the final action multiple times.
+ * source observable.
* <p/>
* Note that "finally" is a Java reserved word and cannot be an identifier,
- * so we use "finally0".
+ * so we use "finallyDo".
*
* @param sequence An observable sequence of elements
* @param action An action to be taken when the sequence is complete or throws an exception
@@ -48,7 +46,7 @@ public final class OperationFinally {
* the given action will be called.
* @see <a href="http://msdn.microsoft.com/en-us/library/hh212133(v=vs.103).aspx">MSDN Observable.Finally method</a>
*/
- public static <T> Func1<Observer<T>, Subscription> finally0(final Observable<T> sequence, final Action0 action) {
+ public static <T> Func1<Observer<T>, Subscription> finallyDo(final Observable<T> sequence, final Action0 action) {
return new Func1<Observer<T>, Subscription>() {
@Override
public Subscription call(Observer<T> observer) {
@@ -60,26 +58,14 @@ public Subscription call(Observer<T> observer) {
private static class Finally<T> implements Func1<Observer<T>, Subscription> {
private final Observable<T> sequence;
private final Action0 finalAction;
- private Subscription s;
Finally(final Observable<T> sequence, Action0 finalAction) {
this.sequence = sequence;
this.finalAction = finalAction;
}
- private final AtomicObservableSubscription Subscription = new AtomicObservableSubscription();
-
- private final Subscription actualSubscription = new Subscription() {
- @Override
- public void unsubscribe() {
- if (null != s)
- s.unsubscribe();
- }
- };
-
public Subscription call(Observer<T> observer) {
- s = sequence.subscribe(new FinallyObserver(observer));
- return Subscription.wrap(actualSubscription);
+ return sequence.subscribe(new FinallyObserver(observer));
}
private class FinallyObserver implements Observer<T> {
@@ -91,14 +77,20 @@ private class FinallyObserver implements Observer<T> {
@Override
public void onCompleted() {
- observer.onCompleted();
- finalAction.call();
+ try {
+ observer.onCompleted();
+ } finally {
+ finalAction.call();
+ }
}
@Override
public void onError(Exception e) {
- observer.onError(e);
- finalAction.call();
+ try {
+ observer.onError(e);
+ } finally {
+ finalAction.call();
+ }
}
@Override
@@ -117,7 +109,7 @@ public void before() {
aObserver = mock(Observer.class);
}
private void checkActionCalled(Observable<String> input) {
- Observable.create(finally0(input, aAction0)).subscribe(aObserver);
+ Observable.create(finallyDo(input, aAction0)).subscribe(aObserver);
verify(aAction0, times(1)).call();
}
@Test
|
33b59a403ff3d1297bc8378a4ab995aff0e6ec73
|
intellij-community
|
PY-996--
|
a
|
https://github.com/JetBrains/intellij-community
|
diff --git a/python/src/com/jetbrains/python/actions/AddFieldQuickFix.java b/python/src/com/jetbrains/python/actions/AddFieldQuickFix.java
index e595307a13eba..68c4338e4bb8e 100644
--- a/python/src/com/jetbrains/python/actions/AddFieldQuickFix.java
+++ b/python/src/com/jetbrains/python/actions/AddFieldQuickFix.java
@@ -43,7 +43,7 @@ public String getFamilyName() {
}
@Nullable
- private static PsiElement appendToInit(PyFunction init, Function<String, PyStatement> callback) {
+ public static PsiElement appendToInit(PyFunction init, Function<String, PyStatement> callback) {
// add this field as the last stmt of the constructor
final PyStatementList stmt_list = init.getStatementList();
PyStatement[] stmts = stmt_list.getStatements(); // NOTE: rather wasteful, consider iterable stmt list
diff --git a/python/src/com/jetbrains/python/refactoring/introduce/PyIntroduceDialog.form b/python/src/com/jetbrains/python/refactoring/introduce/PyIntroduceDialog.form
index 936b4b4f25e90..d56e5206d202b 100644
--- a/python/src/com/jetbrains/python/refactoring/introduce/PyIntroduceDialog.form
+++ b/python/src/com/jetbrains/python/refactoring/introduce/PyIntroduceDialog.form
@@ -110,4 +110,11 @@
</vspacer>
</children>
</grid>
+ <buttonGroups>
+ <group name="placeButtonGroup">
+ <member id="2a16a"/>
+ <member id="9196d"/>
+ <member id="7fc5b"/>
+ </group>
+ </buttonGroups>
</form>
diff --git a/python/src/com/jetbrains/python/refactoring/introduce/field/FieldIntroduceHandler.java b/python/src/com/jetbrains/python/refactoring/introduce/field/FieldIntroduceHandler.java
index 0bc67dd995da6..9da92cb66fa64 100644
--- a/python/src/com/jetbrains/python/refactoring/introduce/field/FieldIntroduceHandler.java
+++ b/python/src/com/jetbrains/python/refactoring/introduce/field/FieldIntroduceHandler.java
@@ -14,6 +14,7 @@
import com.jetbrains.python.PyNames;
import com.jetbrains.python.actions.AddFieldQuickFix;
import com.jetbrains.python.psi.*;
+import com.jetbrains.python.psi.impl.PyFunctionBuilder;
import com.jetbrains.python.refactoring.PyRefactoringUtil;
import com.jetbrains.python.refactoring.introduce.IntroduceHandler;
import com.jetbrains.python.refactoring.introduce.variable.VariableIntroduceHandler;
@@ -74,17 +75,31 @@ protected PsiElement addDeclaration(@NotNull PsiElement expression, @NotNull Psi
final PsiElement expr = expression instanceof PyClass ? expression : expression.getParent();
PsiElement anchor = PyUtil.getContainingClassOrSelf(expr);
assert anchor instanceof PyClass;
+ final PyClass clazz = (PyClass)anchor;
+ final Project project = anchor.getProject();
if (initInConstructor == InitPlace.CONSTRUCTOR) {
- final Project project = anchor.getProject();
- final PyClass clazz = (PyClass)anchor;
- AddFieldQuickFix.addFieldToInit(project, clazz, "", new AddFieldDeclaration(declaration));
- final PyFunction init = clazz.findMethodByName(PyNames.INIT, false);
- final PyStatementList statements = init != null ? init.getStatementList() : null;
- return statements != null ? statements.getLastChild() : null;
+ return AddFieldQuickFix.addFieldToInit(project, clazz, "", new AddFieldDeclaration(declaration));
+ } else if (initInConstructor == InitPlace.SET_UP) {
+ return addFieldToSetUp(project, clazz, declaration);
}
return VariableIntroduceHandler.doIntroduceVariable(expression, declaration, occurrences, replaceAll);
}
+ @Nullable
+ private static PsiElement addFieldToSetUp(Project project, PyClass clazz, PsiElement declaration) {
+ final PyFunction init = clazz.findMethodByName(PythonUnitTestUtil.TESTCASE_SETUP_NAME, false);
+ if (init != null) {
+ return AddFieldQuickFix.appendToInit(init, new AddFieldDeclaration(declaration));
+ }
+ final PyFunctionBuilder builder = new PyFunctionBuilder(PythonUnitTestUtil.TESTCASE_SETUP_NAME);
+ builder.parameter(PyNames.CANONICAL_SELF);
+ PyFunction setUp = builder.buildFunction(project);
+ final PyStatementList statements = clazz.getStatementList();
+ final PsiElement anchor = statements.getFirstChild();
+ setUp = (PyFunction)statements.addBefore(setUp, anchor);
+ return AddFieldQuickFix.appendToInit(setUp, new AddFieldDeclaration(declaration));
+ }
+
@Override
protected PyExpression createExpression(Project project, String name, PyAssignmentStatement declaration) {
final String text = declaration.getText();
diff --git a/python/src/com/jetbrains/python/testing/PythonUnitTestUtil.java b/python/src/com/jetbrains/python/testing/PythonUnitTestUtil.java
index 83f6b1f768b46..9e6ccb729e1f5 100644
--- a/python/src/com/jetbrains/python/testing/PythonUnitTestUtil.java
+++ b/python/src/com/jetbrains/python/testing/PythonUnitTestUtil.java
@@ -13,6 +13,7 @@
*/
public class PythonUnitTestUtil {
private static final String TESTCASE_CLASS_NAME = "TestCase";
+ public static final String TESTCASE_SETUP_NAME = "setUp";
private static final String UNITTEST_FILE_NAME = "unittest.py";
private static final String TESTCASE_METHOD_PREFIX = "test";
|
f958ffcc8a52d92385cc29ab896eaf294d695815
|
orientdb
|
Fixed issue on browsing of entire cluster/class--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorCluster.java b/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorCluster.java
index 8f898a1b469..a0ff55f2bf1 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorCluster.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/iterator/ORecordIteratorCluster.java
@@ -43,7 +43,7 @@ public ORecordIteratorCluster(final ODatabaseRecord<REC> iDatabase, final ODatab
currentClusterId = iClusterId;
rangeFrom = -1;
rangeTo = -1;
- clusterSize = database.countClusterElements(currentClusterId);
+ clusterSize = database.getStorage().getClusterLastEntryPosition(currentClusterId);
}
@Override
|
9b07112e30871a4a4f8253c8418e93417dcdad97
|
drools
|
JBRULES-1906: NPE when LiteralRestriction value is- set to null--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@24509 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-verifier/src/main/java/org/drools/verifier/components/Field.java b/drools-verifier/src/main/java/org/drools/verifier/components/Field.java
index 4e4b6463cc8..453f1b59d12 100644
--- a/drools-verifier/src/main/java/org/drools/verifier/components/Field.java
+++ b/drools-verifier/src/main/java/org/drools/verifier/components/Field.java
@@ -18,6 +18,7 @@ public static class FieldType {
public static final FieldType VARIABLE = new FieldType("Variable");
public static final FieldType OBJECT = new FieldType("Object");
public static final FieldType ENUM = new FieldType("Enum");
+ public static final FieldType UNKNOWN = new FieldType("Unknown");
private final String string;
diff --git a/drools-verifier/src/main/java/org/drools/verifier/components/LiteralRestriction.java b/drools-verifier/src/main/java/org/drools/verifier/components/LiteralRestriction.java
index 73133feb2db..756d31dac0d 100644
--- a/drools-verifier/src/main/java/org/drools/verifier/components/LiteralRestriction.java
+++ b/drools-verifier/src/main/java/org/drools/verifier/components/LiteralRestriction.java
@@ -8,7 +8,7 @@
import org.drools.verifier.report.components.Cause;
/**
- *
+ *
* @author Toni Rikkola
*/
public class LiteralRestriction extends Restriction implements Cause {
@@ -31,7 +31,7 @@ public RestrictionType getRestrictionType() {
/**
* Compares two LiteralRestrictions by value.
- *
+ *
* @param restriction
* Restriction that this object is compared to.
* @return a negative integer, zero, or a positive integer as this object is
@@ -68,6 +68,8 @@ public int compareValues(LiteralRestriction restriction)
}
} else if (valueType == Field.FieldType.STRING) {
return stringValue.compareTo(restriction.getValueAsString());
+ } else if (valueType == Field.FieldType.UNKNOWN) {
+ return 0;
}
throw new DataFormatException("Value types did not match. Value type "
@@ -109,6 +111,15 @@ public Date getDateValue() {
public void setValue(String value) {
+ if (value == null) {
+ stringValue = null;
+ valueType = Field.FieldType.UNKNOWN;
+ return;
+ }
+
+ stringValue = value;
+ valueType = Field.FieldType.STRING;
+
if ("true".equals(value) || "false".equals(value)) {
booleanValue = value.equals("true");
valueType = Field.FieldType.BOOLEAN;
@@ -147,11 +158,9 @@ public void setValue(String value) {
// Not a date.
}
- stringValue = value;
- valueType = Field.FieldType.STRING;
}
- public boolean isBooleanValue() {
+ public boolean getBooleanValue() {
return booleanValue;
}
diff --git a/drools-verifier/src/test/java/org/drools/verifier/components/LiteralRestrictionTest.java b/drools-verifier/src/test/java/org/drools/verifier/components/LiteralRestrictionTest.java
new file mode 100644
index 00000000000..668e9bed5c7
--- /dev/null
+++ b/drools-verifier/src/test/java/org/drools/verifier/components/LiteralRestrictionTest.java
@@ -0,0 +1,44 @@
+package org.drools.verifier.components;
+
+import junit.framework.TestCase;
+
+public class LiteralRestrictionTest extends TestCase {
+
+ public void testSetValue() {
+ LiteralRestriction booleanRestriction = new LiteralRestriction();
+ booleanRestriction.setValue("true");
+
+ assertEquals(Field.FieldType.BOOLEAN, booleanRestriction.getValueType());
+ assertEquals(true, booleanRestriction.getBooleanValue());
+
+ LiteralRestriction intRestriction = new LiteralRestriction();
+ intRestriction.setValue("1");
+
+ assertEquals(Field.FieldType.INT, intRestriction.getValueType());
+ assertEquals(1, intRestriction.getIntValue());
+
+ LiteralRestriction doubleRestriction = new LiteralRestriction();
+ doubleRestriction.setValue("1.0");
+
+ assertEquals(Field.FieldType.DOUBLE, doubleRestriction.getValueType());
+ assertEquals(1.0, doubleRestriction.getDoubleValue());
+
+ LiteralRestriction dateRestriction = new LiteralRestriction();
+ dateRestriction.setValue("11-jan-2008");
+
+ assertEquals(Field.FieldType.DATE, dateRestriction.getValueType());
+
+ LiteralRestriction stringRestriction = new LiteralRestriction();
+ stringRestriction.setValue("test test");
+
+ assertEquals(Field.FieldType.STRING, stringRestriction.getValueType());
+ assertEquals("test test", stringRestriction.getValueAsString());
+
+ LiteralRestriction nullRestriction = new LiteralRestriction();
+ nullRestriction.setValue(null);
+
+ assertEquals(Field.FieldType.UNKNOWN, nullRestriction.getValueType());
+ assertEquals(null, nullRestriction.getValueAsString());
+ assertEquals(null, nullRestriction.getValueAsObject());
+ }
+}
|
b02e6dc996d3985a8a136f290c4a8810ce05aaab
|
elasticsearch
|
Migrating NodesInfo API to use plugins instead of- singular plugin--In order to be consistent (and because in 1.0 we switched from-parameter driven information to specifzing the metrics as part of the URI)-this patch moves from 'plugin' to 'plugins' in the Nodes Info API.-
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/docs/reference/cluster/nodes-info.asciidoc b/docs/reference/cluster/nodes-info.asciidoc
index 96b5fb5b9e3e9..61d8c1a5a6a1f 100644
--- a/docs/reference/cluster/nodes-info.asciidoc
+++ b/docs/reference/cluster/nodes-info.asciidoc
@@ -17,7 +17,7 @@ The second command selectively retrieves nodes information of only
By default, it just returns all attributes and core settings for a node.
It also allows to get only information on `settings`, `os`, `process`, `jvm`,
-`thread_pool`, `network`, `transport`, `http` and `plugin`:
+`thread_pool`, `network`, `transport`, `http` and `plugins`:
[source,js]
--------------------------------------------------
@@ -30,9 +30,9 @@ curl -XGET 'http://localhost:9200/_nodes/nodeId1,nodeId2/info/jvm,process'
curl -XGET 'http://localhost:9200/_nodes/nodeId1,nodeId2/_all
--------------------------------------------------
-The `all` flag can be set to return all the information - or you can simply omit it.
+The `_all` flag can be set to return all the information - or you can simply omit it.
-`plugin` - if set, the result will contain details about the loaded
+`plugins` - if set, the result will contain details about the loaded
plugins per node:
* `name`: plugin name
diff --git a/rest-api-spec/api/nodes.info.json b/rest-api-spec/api/nodes.info.json
index 4885fa6f9d084..e121e3047aba6 100644
--- a/rest-api-spec/api/nodes.info.json
+++ b/rest-api-spec/api/nodes.info.json
@@ -12,7 +12,7 @@
},
"metric": {
"type": "list",
- "options": ["settings", "os", "process", "jvm", "thread_pool", "network", "transport", "http", "plugin"],
+ "options": ["settings", "os", "process", "jvm", "thread_pool", "network", "transport", "http", "plugins"],
"description": "A comma-separated list of metrics you wish returned. Leave empty to return all."
}
},
diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java
index b9ab8d323e4a7..589471af36dfd 100644
--- a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java
+++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java
@@ -38,7 +38,7 @@ public class NodesInfoRequest extends NodesOperationRequest<NodesInfoRequest> {
private boolean network = true;
private boolean transport = true;
private boolean http = true;
- private boolean plugin = true;
+ private boolean plugins = true;
public NodesInfoRequest() {
}
@@ -63,7 +63,7 @@ public NodesInfoRequest clear() {
network = false;
transport = false;
http = false;
- plugin = false;
+ plugins = false;
return this;
}
@@ -79,7 +79,7 @@ public NodesInfoRequest all() {
network = true;
transport = true;
http = true;
- plugin = true;
+ plugins = true;
return this;
}
@@ -205,19 +205,19 @@ public NodesInfoRequest http(boolean http) {
/**
* Should information about plugins be returned
- * @param plugin true if you want info
+ * @param plugins true if you want info
* @return The request
*/
- public NodesInfoRequest plugin(boolean plugin) {
- this.plugin = plugin;
+ public NodesInfoRequest plugins(boolean plugins) {
+ this.plugins = plugins;
return this;
}
/**
* @return true if information about plugins is requested
*/
- public boolean plugin() {
- return plugin;
+ public boolean plugins() {
+ return plugins;
}
@Override
@@ -231,7 +231,7 @@ public void readFrom(StreamInput in) throws IOException {
network = in.readBoolean();
transport = in.readBoolean();
http = in.readBoolean();
- plugin = in.readBoolean();
+ plugins = in.readBoolean();
}
@Override
@@ -245,6 +245,6 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(network);
out.writeBoolean(transport);
out.writeBoolean(http);
- out.writeBoolean(plugin);
+ out.writeBoolean(plugins);
}
}
diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java
index f119522b3a2d6..c6c0cc19f9cfc 100644
--- a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java
+++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java
@@ -113,8 +113,11 @@ public NodesInfoRequestBuilder setHttp(boolean http) {
return this;
}
- public NodesInfoRequestBuilder setPlugin(boolean plugin) {
- request().plugin(plugin);
+ /**
+ * Should the node plugins info be returned.
+ */
+ public NodesInfoRequestBuilder setPlugins(boolean plugins) {
+ request().plugins(plugins);
return this;
}
diff --git a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java
index 735ea418b601d..1392ab9b52685 100644
--- a/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java
+++ b/src/main/java/org/elasticsearch/action/admin/cluster/node/info/TransportNodesInfoAction.java
@@ -98,7 +98,7 @@ protected NodeInfo newNodeResponse() {
protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest) throws ElasticsearchException {
NodesInfoRequest request = nodeRequest.request;
return nodeService.info(request.settings(), request.os(), request.process(), request.jvm(), request.threadPool(),
- request.network(), request.transport(), request.http(), request.plugin());
+ request.network(), request.transport(), request.http(), request.plugins());
}
@Override
diff --git a/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java b/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
index fe22ffb6f6462..35d2ff5fde1c6 100644
--- a/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
+++ b/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/info/RestNodesInfoAction.java
@@ -44,7 +44,7 @@
public class RestNodesInfoAction extends BaseRestHandler {
private final SettingsFilter settingsFilter;
- private final static Set<String> ALLOWED_METRICS = Sets.newHashSet("http", "jvm", "network", "os", "plugin", "process", "settings", "thread_pool", "transport");
+ private final static Set<String> ALLOWED_METRICS = Sets.newHashSet("http", "jvm", "network", "os", "plugins", "process", "settings", "thread_pool", "transport");
@Inject
public RestNodesInfoAction(Settings settings, Client client, RestController controller,
@@ -99,7 +99,7 @@ public void handleRequest(final RestRequest request, final RestChannel channel)
nodesInfoRequest.network(metrics.contains("network"));
nodesInfoRequest.transport(metrics.contains("transport"));
nodesInfoRequest.http(metrics.contains("http"));
- nodesInfoRequest.plugin(metrics.contains("plugin"));
+ nodesInfoRequest.plugins(metrics.contains("plugins"));
}
client.admin().cluster().nodesInfo(nodesInfoRequest, new ActionListener<NodesInfoResponse>() {
diff --git a/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoTests.java b/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoTests.java
index 6104a7cf3eeb3..8a3d733e96c1b 100644
--- a/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoTests.java
+++ b/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoTests.java
@@ -129,7 +129,7 @@ public void testNodeInfoPlugin() throws URISyntaxException {
ClusterHealthResponse clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus()).actionGet();
logger.info("--> done cluster_health, status " + clusterHealth.getStatus());
- NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().clear().setPlugin(true).execute().actionGet();
+ NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().clear().setPlugins(true).execute().actionGet();
logger.info("--> full json answer, status " + response.toString());
assertNodeContainsPlugins(response, server1NodeId,
diff --git a/src/test/java/org/elasticsearch/plugin/PluginManagerTests.java b/src/test/java/org/elasticsearch/plugin/PluginManagerTests.java
index 1cb1dea67b1e8..2d69135f1765e 100644
--- a/src/test/java/org/elasticsearch/plugin/PluginManagerTests.java
+++ b/src/test/java/org/elasticsearch/plugin/PluginManagerTests.java
@@ -143,7 +143,7 @@ private static void downloadAndExtract(String pluginName, String pluginUrl) thro
}
private void assertPluginLoaded(String pluginName) {
- NodesInfoResponse nodesInfoResponse = client().admin().cluster().prepareNodesInfo().clear().setPlugin(true).get();
+ NodesInfoResponse nodesInfoResponse = client().admin().cluster().prepareNodesInfo().clear().setPlugins(true).get();
assertThat(nodesInfoResponse.getNodes().length, equalTo(1));
assertThat(nodesInfoResponse.getNodes()[0].getPlugins().getInfos(), notNullValue());
assertThat(nodesInfoResponse.getNodes()[0].getPlugins().getInfos().size(), equalTo(1));
|
22b5df0be228a82f0ed3802c87860cfc3d3ce9ed
|
restlet-framework-java
|
- Fixed bug causing unit tests to fail--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/module/org.restlet/src/org/restlet/data/Form.java b/module/org.restlet/src/org/restlet/data/Form.java
index 987143338c..06311102d0 100644
--- a/module/org.restlet/src/org/restlet/data/Form.java
+++ b/module/org.restlet/src/org/restlet/data/Form.java
@@ -111,7 +111,7 @@ public Form(String queryString) {
@Override
public Parameter createEntry(String name, String value) {
- return new Parameter();
+ return new Parameter(name, value);
}
@Override
diff --git a/module/org.restlet/src/org/restlet/util/Series.java b/module/org.restlet/src/org/restlet/util/Series.java
index a98ba1a45e..e9a3b23761 100644
--- a/module/org.restlet/src/org/restlet/util/Series.java
+++ b/module/org.restlet/src/org/restlet/util/Series.java
@@ -30,7 +30,7 @@
*/
public interface Series<E extends Series.Entry> extends List<E> {
/**
- * A sequence entry.
+ * A named series entry.
*
* @author Jerome Louvel ([email protected])
*/
|
06d24042b64d6fa0e179b5845990068f849d9ce5
|
hadoop
|
YARN-1185. Fixed FileSystemRMStateStore to not- leave partial files that prevent subsequent ResourceManager recovery.- Contributed by Omkar Vinit Joshi. svn merge --ignore-ancestry -c 1533803- ../../trunk/--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1533805 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 27cf02c418127..8c0ea418f00a0 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -105,6 +105,9 @@ Release 2.2.1 - UNRELEASED
YARN-1295. In UnixLocalWrapperScriptBuilder, using bash -c can cause Text
file busy errors (Sandy Ryza)
+ YARN-1185. Fixed FileSystemRMStateStore to not leave partial files that
+ prevent subsequent ResourceManager recovery. (Omkar Vinit Joshi via vinodkv)
+
Release 2.2.0 - 2013-10-13
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/FileSystemRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/FileSystemRMStateStore.java
index 062f5cc55329e..e85ba924a1a74 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/FileSystemRMStateStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/FileSystemRMStateStore.java
@@ -22,6 +22,7 @@
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -118,6 +119,9 @@ private void loadRMAppState(RMState rmState) throws Exception {
for (FileStatus childNodeStatus : fs.listStatus(appDir.getPath())) {
assert childNodeStatus.isFile();
String childNodeName = childNodeStatus.getPath().getName();
+ if (checkAndRemovePartialRecord(childNodeStatus.getPath())) {
+ continue;
+ }
byte[] childData =
readFile(childNodeStatus.getPath(), childNodeStatus.getLen());
if (childNodeName.startsWith(ApplicationId.appIdStrPrefix)) {
@@ -178,12 +182,28 @@ private void loadRMAppState(RMState rmState) throws Exception {
}
}
+ private boolean checkAndRemovePartialRecord(Path record) throws IOException {
+ // If the file ends with .tmp then it shows that it failed
+ // during saving state into state store. The file will be deleted as a
+ // part of this call
+ if (record.getName().endsWith(".tmp")) {
+ LOG.error("incomplete rm state store entry found :"
+ + record);
+ fs.delete(record, false);
+ return true;
+ }
+ return false;
+ }
+
private void loadRMDTSecretManagerState(RMState rmState) throws Exception {
FileStatus[] childNodes = fs.listStatus(rmDTSecretManagerRoot);
for(FileStatus childNodeStatus : childNodes) {
assert childNodeStatus.isFile();
String childNodeName = childNodeStatus.getPath().getName();
+ if (checkAndRemovePartialRecord(childNodeStatus.getPath())) {
+ continue;
+ }
if(childNodeName.startsWith(DELEGATION_TOKEN_SEQUENCE_NUMBER_PREFIX)) {
rmState.rmSecretManagerState.dtSequenceNumber =
Integer.parseInt(childNodeName.split("_")[1]);
@@ -344,10 +364,19 @@ private byte[] readFile(Path inputPath, long len) throws Exception {
return data;
}
+ /*
+ * In order to make this write atomic as a part of write we will first write
+ * data to .tmp file and then rename it. Here we are assuming that rename is
+ * atomic for underlying file system.
+ */
private void writeFile(Path outputPath, byte[] data) throws Exception {
- FSDataOutputStream fsOut = fs.create(outputPath, false);
+ Path tempPath =
+ new Path(outputPath.getParent(), outputPath.getName() + ".tmp");
+ FSDataOutputStream fsOut = null;
+ fsOut = fs.create(tempPath, false);
fsOut.write(data);
fsOut.close();
+ fs.rename(tempPath, outputPath);
}
private boolean renameFile(Path src, Path dst) throws Exception {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java
similarity index 80%
rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestRMStateStore.java
rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java
index d75fc7d9e18a6..72ef37fa23658 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestRMStateStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java
@@ -39,6 +39,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -75,9 +76,9 @@
import org.junit.Test;
-public class TestRMStateStore extends ClientBaseWithFixes{
+public class RMStateStoreTestBase extends ClientBaseWithFixes{
- public static final Log LOG = LogFactory.getLog(TestRMStateStore.class);
+ public static final Log LOG = LogFactory.getLog(RMStateStoreTestBase.class);
static class TestDispatcher implements
Dispatcher, EventHandler<RMAppAttemptStoredEvent> {
@@ -116,104 +117,6 @@ interface RMStateStoreHelper {
boolean isFinalStateValid() throws Exception;
}
- @Test
- public void testZKRMStateStoreRealZK() throws Exception {
- TestZKRMStateStoreTester zkTester = new TestZKRMStateStoreTester();
- testRMAppStateStore(zkTester);
- testRMDTSecretManagerStateStore(zkTester);
- }
-
- @Test
- public void testFSRMStateStore() throws Exception {
- HdfsConfiguration conf = new HdfsConfiguration();
- MiniDFSCluster cluster =
- new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
- try {
- TestFSRMStateStoreTester fsTester = new TestFSRMStateStoreTester(cluster);
- testRMAppStateStore(fsTester);
- testRMDTSecretManagerStateStore(fsTester);
- } finally {
- cluster.shutdown();
- }
- }
-
- class TestZKRMStateStoreTester implements RMStateStoreHelper {
- ZooKeeper client;
- ZKRMStateStore store;
-
- class TestZKRMStateStore extends ZKRMStateStore {
- public TestZKRMStateStore(Configuration conf, String workingZnode)
- throws Exception {
- init(conf);
- start();
- assertTrue(znodeWorkingPath.equals(workingZnode));
- }
-
- @Override
- public ZooKeeper getNewZooKeeper() throws IOException {
- return client;
- }
- }
-
- public RMStateStore getRMStateStore() throws Exception {
- String workingZnode = "/Test";
- YarnConfiguration conf = new YarnConfiguration();
- conf.set(YarnConfiguration.ZK_RM_STATE_STORE_ADDRESS, hostPort);
- conf.set(YarnConfiguration.ZK_RM_STATE_STORE_PARENT_PATH, workingZnode);
- this.client = createClient();
- this.store = new TestZKRMStateStore(conf, workingZnode);
- return this.store;
- }
-
- @Override
- public boolean isFinalStateValid() throws Exception {
- List<String> nodes = client.getChildren(store.znodeWorkingPath, false);
- return nodes.size() == 1;
- }
- }
-
- class TestFSRMStateStoreTester implements RMStateStoreHelper {
- Path workingDirPathURI;
- FileSystemRMStateStore store;
- MiniDFSCluster cluster;
-
- class TestFileSystemRMStore extends FileSystemRMStateStore {
- TestFileSystemRMStore(Configuration conf) throws Exception {
- init(conf);
- Assert.assertNull(fs);
- assertTrue(workingDirPathURI.equals(fsWorkingPath));
- start();
- Assert.assertNotNull(fs);
- }
- }
-
- public TestFSRMStateStoreTester(MiniDFSCluster cluster) throws Exception {
- Path workingDirPath = new Path("/Test");
- this.cluster = cluster;
- FileSystem fs = cluster.getFileSystem();
- fs.mkdirs(workingDirPath);
- Path clusterURI = new Path(cluster.getURI());
- workingDirPathURI = new Path(clusterURI, workingDirPath);
- fs.close();
- }
-
- @Override
- public RMStateStore getRMStateStore() throws Exception {
- YarnConfiguration conf = new YarnConfiguration();
- conf.set(YarnConfiguration.FS_RM_STATE_STORE_URI,
- workingDirPathURI.toString());
- this.store = new TestFileSystemRMStore(conf);
- return store;
- }
-
- @Override
- public boolean isFinalStateValid() throws Exception {
- FileSystem fs = cluster.getFileSystem();
- FileStatus[] files = fs.listStatus(workingDirPathURI);
- return files.length == 1;
- }
- }
-
void waitNotify(TestDispatcher dispatcher) {
long startTime = System.currentTimeMillis();
while(!dispatcher.notified) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java
new file mode 100644
index 0000000000000..a1a6eab3fd3d5
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.resourcemanager.recovery;
+
+import static org.junit.Assert.assertTrue;
+import junit.framework.Assert;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.junit.Test;
+
+public class TestFSRMStateStore extends RMStateStoreTestBase {
+
+ public static final Log LOG = LogFactory.getLog(TestFSRMStateStore.class);
+
+ class TestFSRMStateStoreTester implements RMStateStoreHelper {
+
+ Path workingDirPathURI;
+ FileSystemRMStateStore store;
+ MiniDFSCluster cluster;
+
+ class TestFileSystemRMStore extends FileSystemRMStateStore {
+
+ TestFileSystemRMStore(Configuration conf) throws Exception {
+ init(conf);
+ Assert.assertNull(fs);
+ assertTrue(workingDirPathURI.equals(fsWorkingPath));
+ start();
+ Assert.assertNotNull(fs);
+ }
+ }
+
+ public TestFSRMStateStoreTester(MiniDFSCluster cluster) throws Exception {
+ Path workingDirPath = new Path("/Test");
+ this.cluster = cluster;
+ FileSystem fs = cluster.getFileSystem();
+ fs.mkdirs(workingDirPath);
+ Path clusterURI = new Path(cluster.getURI());
+ workingDirPathURI = new Path(clusterURI, workingDirPath);
+ fs.close();
+ }
+
+ @Override
+ public RMStateStore getRMStateStore() throws Exception {
+ YarnConfiguration conf = new YarnConfiguration();
+ conf.set(YarnConfiguration.FS_RM_STATE_STORE_URI,
+ workingDirPathURI.toString());
+ this.store = new TestFileSystemRMStore(conf);
+ return store;
+ }
+
+ @Override
+ public boolean isFinalStateValid() throws Exception {
+ FileSystem fs = cluster.getFileSystem();
+ FileStatus[] files = fs.listStatus(workingDirPathURI);
+ return files.length == 1;
+ }
+ }
+
+ @Test
+ public void testFSRMStateStore() throws Exception {
+ HdfsConfiguration conf = new HdfsConfiguration();
+ MiniDFSCluster cluster =
+ new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
+ try {
+ TestFSRMStateStoreTester fsTester = new TestFSRMStateStoreTester(cluster);
+ // If the state store is FileSystemRMStateStore then add corrupted entry.
+ // It should discard the entry and remove it from file system.
+ FSDataOutputStream fsOut = null;
+ FileSystemRMStateStore fileSystemRMStateStore =
+ (FileSystemRMStateStore) fsTester.getRMStateStore();
+ String appAttemptIdStr3 = "appattempt_1352994193343_0001_000003";
+ ApplicationAttemptId attemptId3 =
+ ConverterUtils.toApplicationAttemptId(appAttemptIdStr3);
+ Path rootDir =
+ new Path(fileSystemRMStateStore.fsWorkingPath, "FSRMStateRoot");
+ Path appRootDir = new Path(rootDir, "RMAppRoot");
+ Path appDir =
+ new Path(appRootDir, attemptId3.getApplicationId().toString());
+ Path tempAppAttemptFile =
+ new Path(appDir, attemptId3.toString() + ".tmp");
+ fsOut = fileSystemRMStateStore.fs.create(tempAppAttemptFile, false);
+ fsOut.write("Some random data ".getBytes());
+ fsOut.close();
+
+ testRMAppStateStore(fsTester);
+ Assert.assertFalse(fileSystemRMStateStore.fsWorkingPath
+ .getFileSystem(conf).exists(tempAppAttemptFile));
+ testRMDTSecretManagerStateStore(fsTester);
+ } finally {
+ cluster.shutdown();
+ }
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java
new file mode 100644
index 0000000000000..a6929a8936635
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.resourcemanager.recovery;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.zookeeper.ZooKeeper;
+import org.junit.Test;
+
+public class TestZKRMStateStore extends RMStateStoreTestBase {
+
+ public static final Log LOG = LogFactory.getLog(TestZKRMStateStore.class);
+
+ class TestZKRMStateStoreTester implements RMStateStoreHelper {
+
+ ZooKeeper client;
+ ZKRMStateStore store;
+
+ class TestZKRMStateStoreInternal extends ZKRMStateStore {
+
+ public TestZKRMStateStoreInternal(Configuration conf, String workingZnode)
+ throws Exception {
+ init(conf);
+ start();
+ assertTrue(znodeWorkingPath.equals(workingZnode));
+ }
+
+ @Override
+ public ZooKeeper getNewZooKeeper() throws IOException {
+ return client;
+ }
+ }
+
+ public RMStateStore getRMStateStore() throws Exception {
+ String workingZnode = "/Test";
+ YarnConfiguration conf = new YarnConfiguration();
+ conf.set(YarnConfiguration.ZK_RM_STATE_STORE_ADDRESS, hostPort);
+ conf.set(YarnConfiguration.ZK_RM_STATE_STORE_PARENT_PATH, workingZnode);
+ this.client = createClient();
+ this.store = new TestZKRMStateStoreInternal(conf, workingZnode);
+ return this.store;
+ }
+
+ @Override
+ public boolean isFinalStateValid() throws Exception {
+ List<String> nodes = client.getChildren(store.znodeWorkingPath, false);
+ return nodes.size() == 1;
+ }
+ }
+
+ @Test
+ public void testZKRMStateStoreRealZK() throws Exception {
+ TestZKRMStateStoreTester zkTester = new TestZKRMStateStoreTester();
+ testRMAppStateStore(zkTester);
+ testRMDTSecretManagerStateStore(zkTester);
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStoreZKClientConnections.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStoreZKClientConnections.java
index 7c807a5b60202..82e550c9173cc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStoreZKClientConnections.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStoreZKClientConnections.java
@@ -24,7 +24,7 @@
import org.apache.hadoop.ha.ClientBaseWithFixes;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.resourcemanager.recovery.TestRMStateStore.TestDispatcher;
+import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStoreTestBase.TestDispatcher;
import org.apache.hadoop.util.ZKUtil;
import org.apache.zookeeper.CreateMode;
@@ -43,17 +43,20 @@
public class TestZKRMStateStoreZKClientConnections extends
ClientBaseWithFixes {
+
private static final int ZK_OP_WAIT_TIME = 3000;
private Log LOG =
LogFactory.getLog(TestZKRMStateStoreZKClientConnections.class);
class TestZKClient {
+
ZKRMStateStore store;
boolean forExpire = false;
TestForwardingWatcher watcher;
CyclicBarrier syncBarrier = new CyclicBarrier(2);
protected class TestZKRMStateStore extends ZKRMStateStore {
+
public TestZKRMStateStore(Configuration conf, String workingZnode)
throws Exception {
init(conf);
@@ -87,6 +90,7 @@ public synchronized void processWatchEvent(WatchedEvent event)
private class TestForwardingWatcher extends
ClientBaseWithFixes.CountdownWatcher {
+
public void process(WatchedEvent event) {
super.process(event);
try {
@@ -187,7 +191,7 @@ public void testZKSessionTimeout() throws Exception {
}
}
- @Test (timeout = 20000)
+ @Test(timeout = 20000)
public void testSetZKAcl() {
TestZKClient zkClientTester = new TestZKClient();
YarnConfiguration conf = new YarnConfiguration();
@@ -196,10 +200,11 @@ public void testSetZKAcl() {
zkClientTester.store.zkClient.delete(zkClientTester.store
.znodeWorkingPath, -1);
fail("Shouldn't be able to delete path");
- } catch (Exception e) {/* expected behavior */}
+ } catch (Exception e) {/* expected behavior */
+ }
}
- @Test (timeout = 20000)
+ @Test(timeout = 20000)
public void testInvalidZKAclConfiguration() {
TestZKClient zkClientTester = new TestZKClient();
YarnConfiguration conf = new YarnConfiguration();
|
df09fcb6a53b70a828698fbad71e681edbcdc7f4
|
ReactiveX-RxJava
|
ObserveOn/SubscribeOn unit tests--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/concurrency/ForwardingScheduler.java b/rxjava-core/src/main/java/rx/concurrency/ForwardingScheduler.java
new file mode 100644
index 0000000000..2714808766
--- /dev/null
+++ b/rxjava-core/src/main/java/rx/concurrency/ForwardingScheduler.java
@@ -0,0 +1,56 @@
+/**
+ * Copyright 2013 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package rx.concurrency;
+
+import rx.Scheduler;
+import rx.Subscription;
+import rx.util.functions.Action0;
+import rx.util.functions.Func0;
+
+import java.util.concurrent.TimeUnit;
+
+public class ForwardingScheduler implements Scheduler {
+ private final Scheduler underlying;
+
+ public ForwardingScheduler(Scheduler underlying) {
+ this.underlying = underlying;
+ }
+
+ @Override
+ public Subscription schedule(Action0 action) {
+ return underlying.schedule(action);
+ }
+
+ @Override
+ public Subscription schedule(Func0<Subscription> action) {
+ return underlying.schedule(action);
+ }
+
+ @Override
+ public Subscription schedule(Action0 action, long timespan, TimeUnit unit) {
+ return underlying.schedule(action, timespan, unit);
+ }
+
+ @Override
+ public Subscription schedule(Func0<Subscription> action, long timespan, TimeUnit unit) {
+ return underlying.schedule(action, timespan, unit);
+ }
+
+ @Override
+ public long now() {
+ return underlying.now();
+ }
+}
\ No newline at end of file
diff --git a/rxjava-core/src/main/java/rx/concurrency/ImmediateScheduler.java b/rxjava-core/src/main/java/rx/concurrency/ImmediateScheduler.java
index 59908e4e0c..e54d178ae0 100644
--- a/rxjava-core/src/main/java/rx/concurrency/ImmediateScheduler.java
+++ b/rxjava-core/src/main/java/rx/concurrency/ImmediateScheduler.java
@@ -38,8 +38,9 @@ private ImmediateScheduler() {
@Override
public Subscription schedule(Func0<Subscription> action) {
- action.call();
- return Subscriptions.empty();
+ DiscardableAction discardableAction = new DiscardableAction(action);
+ discardableAction.call();
+ return discardableAction;
}
public static class UnitTest {
diff --git a/rxjava-core/src/main/java/rx/concurrency/Schedulers.java b/rxjava-core/src/main/java/rx/concurrency/Schedulers.java
index 61b51d070d..9f5ff2065d 100644
--- a/rxjava-core/src/main/java/rx/concurrency/Schedulers.java
+++ b/rxjava-core/src/main/java/rx/concurrency/Schedulers.java
@@ -44,4 +44,8 @@ public static Scheduler executor(Executor executor) {
public static Scheduler scheduledExecutor(ScheduledExecutorService executor) {
return new ScheduledExecutorServiceScheduler(executor);
}
+
+ public static Scheduler forwardingScheduler(Scheduler underlying) {
+ return new ForwardingScheduler(underlying);
+ }
}
diff --git a/rxjava-core/src/main/java/rx/operators/OperationObserveOn.java b/rxjava-core/src/main/java/rx/operators/OperationObserveOn.java
index 85228f2cff..4e6e14bb85 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationObserveOn.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationObserveOn.java
@@ -15,14 +15,20 @@
*/
package rx.operators;
+import org.junit.Test;
import rx.Observable;
import rx.Observer;
import rx.Scheduler;
import rx.Subscription;
+import rx.concurrency.Schedulers;
import rx.util.functions.Action0;
-import rx.util.functions.Func0;
import rx.util.functions.Func1;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
public class OperationObserveOn {
public static <T> Func1<Observer<T>, Subscription> observeOn(Observable<T> source, Scheduler scheduler) {
@@ -64,23 +70,46 @@ public void call() {
}
@Override
- public void onError(Exception e) {
+ public void onError(final Exception e) {
scheduler.schedule(new Action0() {
@Override
public void call() {
- underlying.onCompleted();
+ underlying.onError(e);
}
});
}
@Override
- public void onNext(T args) {
+ public void onNext(final T args) {
scheduler.schedule(new Action0() {
@Override
public void call() {
- underlying.onCompleted();
+ underlying.onNext(args);
}
});
}
}
+
+ public static class UnitTest {
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testObserveOn() {
+
+ Scheduler scheduler = spy(Schedulers.forwardingScheduler(Schedulers.immediate()));
+
+ Observer<Integer> observer = mock(Observer.class);
+ Observable.create(observeOn(Observable.toObservable(1, 2, 3), scheduler)).subscribe(observer);
+
+ verify(scheduler, times(4)).schedule(any(Action0.class));
+ verifyNoMoreInteractions(scheduler);
+
+ verify(observer, times(1)).onNext(1);
+ verify(observer, times(1)).onNext(2);
+ verify(observer, times(1)).onNext(3);
+ verify(observer, times(1)).onCompleted();
+ }
+
+ }
+
}
diff --git a/rxjava-core/src/main/java/rx/operators/OperationSubscribeOn.java b/rxjava-core/src/main/java/rx/operators/OperationSubscribeOn.java
index b59dd5f37d..104a134657 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationSubscribeOn.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationSubscribeOn.java
@@ -15,14 +15,19 @@
*/
package rx.operators;
+import org.junit.Test;
import rx.Observable;
import rx.Observer;
import rx.Scheduler;
import rx.Subscription;
+import rx.concurrency.Schedulers;
import rx.util.functions.Action0;
import rx.util.functions.Func0;
import rx.util.functions.Func1;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.*;
+
public class OperationSubscribeOn {
public static <T> Func1<Observer<T>, Subscription> subscribeOn(Observable<T> source, Scheduler scheduler) {
@@ -68,4 +73,30 @@ public void call() {
});
}
}
-}
+
+ public static class UnitTest {
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testSubscribeOn() {
+ Observable<Integer> w = Observable.toObservable(1, 2, 3);
+
+ Scheduler scheduler = spy(Schedulers.forwardingScheduler(Schedulers.immediate()));
+
+ Observer<Integer> observer = mock(Observer.class);
+ Subscription subscription = Observable.create(subscribeOn(w, scheduler)).subscribe(observer);
+
+ verify(scheduler, times(1)).schedule(any(Func0.class));
+ subscription.unsubscribe();
+ verify(scheduler, times(1)).schedule(any(Action0.class));
+ verifyNoMoreInteractions(scheduler);
+
+ verify(observer, times(1)).onNext(1);
+ verify(observer, times(1)).onNext(2);
+ verify(observer, times(1)).onNext(3);
+ verify(observer, times(1)).onCompleted();
+ }
+
+ }
+
+}
\ No newline at end of file
|
55a5c26de8023fa65c0f231666034649f963f93f
|
elasticsearch
|
Fix NPE in RangeAggregator--
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java
index 8da74aa133a45..1b27e1c1991b9 100644
--- a/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java
+++ b/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java
@@ -94,7 +94,7 @@ public RangeAggregator(String name,
AggregationContext aggregationContext,
Aggregator parent) {
- super(name, BucketAggregationMode.MULTI_BUCKETS, factories, ranges.size() * parent.estimatedBucketCount(), aggregationContext, parent);
+ super(name, BucketAggregationMode.MULTI_BUCKETS, factories, ranges.size() * (parent == null ? 1 : parent.estimatedBucketCount()), aggregationContext, parent);
assert valuesSource != null;
this.valuesSource = valuesSource;
this.keyed = keyed;
|
5f234f8fa9c8f645b6281ecb81dfe7d1a0d7284e
|
drools
|
JBRULES-527: adding primitive support for hashcode- calculation on indexing--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@7155 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/base/ClassFieldExtractor.java b/drools-core/src/main/java/org/drools/base/ClassFieldExtractor.java
index 6f8fb45d3f0..ba0b65421c7 100644
--- a/drools-core/src/main/java/org/drools/base/ClassFieldExtractor.java
+++ b/drools-core/src/main/java/org/drools/base/ClassFieldExtractor.java
@@ -144,4 +144,8 @@ public short getShortValue(final Object object) {
public Method getNativeReadMethod() {
return this.extractor.getNativeReadMethod();
}
+
+ public int getHashCode(Object object) {
+ return this.extractor.getHashCode( object );
+ }
}
\ No newline at end of file
diff --git a/drools-core/src/main/java/org/drools/base/extractors/BaseBooleanClassFieldExtractor.java b/drools-core/src/main/java/org/drools/base/extractors/BaseBooleanClassFieldExtractor.java
index 8738ae1de20..aedc2991541 100755
--- a/drools-core/src/main/java/org/drools/base/extractors/BaseBooleanClassFieldExtractor.java
+++ b/drools-core/src/main/java/org/drools/base/extractors/BaseBooleanClassFieldExtractor.java
@@ -78,5 +78,9 @@ public Method getNativeReadMethod() {
throw new RuntimeDroolsException("This is a bug. Please report to development team: "+e.getMessage(), e);
}
}
+
+ public int getHashCode(Object object) {
+ return getBooleanValue( object ) ? 1231 : 1237;
+ }
}
diff --git a/drools-core/src/main/java/org/drools/base/extractors/BaseByteClassFieldExtractor.java b/drools-core/src/main/java/org/drools/base/extractors/BaseByteClassFieldExtractor.java
index 06d5a7896c6..fa3b8dc7025 100755
--- a/drools-core/src/main/java/org/drools/base/extractors/BaseByteClassFieldExtractor.java
+++ b/drools-core/src/main/java/org/drools/base/extractors/BaseByteClassFieldExtractor.java
@@ -79,4 +79,7 @@ public Method getNativeReadMethod() {
}
}
+ public int getHashCode(Object object) {
+ return getByteValue( object );
+ }
}
diff --git a/drools-core/src/main/java/org/drools/base/extractors/BaseCharClassFieldExtractor.java b/drools-core/src/main/java/org/drools/base/extractors/BaseCharClassFieldExtractor.java
index 848749cccf5..257649cfdc6 100755
--- a/drools-core/src/main/java/org/drools/base/extractors/BaseCharClassFieldExtractor.java
+++ b/drools-core/src/main/java/org/drools/base/extractors/BaseCharClassFieldExtractor.java
@@ -56,4 +56,8 @@ public Method getNativeReadMethod() {
throw new RuntimeDroolsException("This is a bug. Please report to development team: "+e.getMessage(), e);
}
}
+
+ public int getHashCode(Object object) {
+ return getCharValue( object );
+ }
}
diff --git a/drools-core/src/main/java/org/drools/base/extractors/BaseDoubleClassFieldExtractor.java b/drools-core/src/main/java/org/drools/base/extractors/BaseDoubleClassFieldExtractor.java
index 4c5433cef32..5e1b41601d7 100755
--- a/drools-core/src/main/java/org/drools/base/extractors/BaseDoubleClassFieldExtractor.java
+++ b/drools-core/src/main/java/org/drools/base/extractors/BaseDoubleClassFieldExtractor.java
@@ -57,4 +57,10 @@ public Method getNativeReadMethod() {
throw new RuntimeDroolsException("This is a bug. Please report to development team: "+e.getMessage(), e);
}
}
+
+ public int getHashCode(Object object) {
+ long temp = Double.doubleToLongBits( getDoubleValue( object ) );
+ return (int) ( temp ^ ( temp >>> 32) );
+ }
+
}
diff --git a/drools-core/src/main/java/org/drools/base/extractors/BaseFloatClassFieldExtractor.java b/drools-core/src/main/java/org/drools/base/extractors/BaseFloatClassFieldExtractor.java
index 1e0932cb9dc..82f4330d633 100755
--- a/drools-core/src/main/java/org/drools/base/extractors/BaseFloatClassFieldExtractor.java
+++ b/drools-core/src/main/java/org/drools/base/extractors/BaseFloatClassFieldExtractor.java
@@ -57,4 +57,9 @@ public Method getNativeReadMethod() {
throw new RuntimeDroolsException("This is a bug. Please report to development team: "+e.getMessage(), e);
}
}
+
+ public int getHashCode(Object object) {
+ return Float.floatToIntBits( getFloatValue( object ) );
+ }
+
}
diff --git a/drools-core/src/main/java/org/drools/base/extractors/BaseIntClassFieldExtractor.java b/drools-core/src/main/java/org/drools/base/extractors/BaseIntClassFieldExtractor.java
index 606029c6a39..28e24f6fde8 100755
--- a/drools-core/src/main/java/org/drools/base/extractors/BaseIntClassFieldExtractor.java
+++ b/drools-core/src/main/java/org/drools/base/extractors/BaseIntClassFieldExtractor.java
@@ -57,4 +57,8 @@ public Method getNativeReadMethod() {
throw new RuntimeDroolsException("This is a bug. Please report to development team: "+e.getMessage(), e);
}
}
+
+ public int getHashCode(Object object) {
+ return getIntValue( object );
+ }
}
diff --git a/drools-core/src/main/java/org/drools/base/extractors/BaseLongClassFieldExtractors.java b/drools-core/src/main/java/org/drools/base/extractors/BaseLongClassFieldExtractors.java
index 8adeb2a55f6..aafa76cd1ad 100755
--- a/drools-core/src/main/java/org/drools/base/extractors/BaseLongClassFieldExtractors.java
+++ b/drools-core/src/main/java/org/drools/base/extractors/BaseLongClassFieldExtractors.java
@@ -57,4 +57,10 @@ public Method getNativeReadMethod() {
throw new RuntimeDroolsException("This is a bug. Please report to development team: "+e.getMessage(), e);
}
}
+
+ public int getHashCode(Object object) {
+ long temp = getLongValue( object );
+ return (int) ( temp ^ ( temp >>> 32 ));
+ }
+
}
diff --git a/drools-core/src/main/java/org/drools/base/extractors/BaseObjectClassFieldExtractor.java b/drools-core/src/main/java/org/drools/base/extractors/BaseObjectClassFieldExtractor.java
index b3a0a3fa11d..dff24f0b717 100755
--- a/drools-core/src/main/java/org/drools/base/extractors/BaseObjectClassFieldExtractor.java
+++ b/drools-core/src/main/java/org/drools/base/extractors/BaseObjectClassFieldExtractor.java
@@ -104,4 +104,9 @@ public Method getNativeReadMethod() {
throw new RuntimeDroolsException("This is a bug. Please report to development team: "+e.getMessage(), e);
}
}
+
+ public int getHashCode(Object object) {
+ return getValue( object ).hashCode();
+ }
+
}
diff --git a/drools-core/src/main/java/org/drools/base/extractors/BaseShortClassFieldExtractor.java b/drools-core/src/main/java/org/drools/base/extractors/BaseShortClassFieldExtractor.java
index 5d94f12ef7f..db09964d2b2 100755
--- a/drools-core/src/main/java/org/drools/base/extractors/BaseShortClassFieldExtractor.java
+++ b/drools-core/src/main/java/org/drools/base/extractors/BaseShortClassFieldExtractor.java
@@ -57,4 +57,8 @@ public Method getNativeReadMethod() {
throw new RuntimeDroolsException("This is a bug. Please report to development team: "+e.getMessage(), e);
}
}
+
+ public int getHashCode(Object object) {
+ return getShortValue( object );
+ }
}
diff --git a/drools-core/src/main/java/org/drools/common/EqualityAssertMapComparator.java b/drools-core/src/main/java/org/drools/common/EqualityAssertMapComparator.java
index a542587afc8..e63b1e71ca4 100644
--- a/drools-core/src/main/java/org/drools/common/EqualityAssertMapComparator.java
+++ b/drools-core/src/main/java/org/drools/common/EqualityAssertMapComparator.java
@@ -71,6 +71,6 @@ public int compare(final Object o1,
}
public String toString() {
- return "identity";
+ return "equality";
}
}
diff --git a/drools-core/src/main/java/org/drools/facttemplates/FactTemplateFieldExtractor.java b/drools-core/src/main/java/org/drools/facttemplates/FactTemplateFieldExtractor.java
index 7388717eed3..33725a3f10a 100644
--- a/drools-core/src/main/java/org/drools/facttemplates/FactTemplateFieldExtractor.java
+++ b/drools-core/src/main/java/org/drools/facttemplates/FactTemplateFieldExtractor.java
@@ -76,4 +76,7 @@ public Method getNativeReadMethod() {
}
}
+ public int getHashCode(Object object) {
+ return getValue( object ).hashCode();
+ }
}
diff --git a/drools-core/src/main/java/org/drools/rule/Declaration.java b/drools-core/src/main/java/org/drools/rule/Declaration.java
index 65b1013bb63..732d91a3c8e 100644
--- a/drools-core/src/main/java/org/drools/rule/Declaration.java
+++ b/drools-core/src/main/java/org/drools/rule/Declaration.java
@@ -188,6 +188,9 @@ public boolean getBooleanValue(Object object) {
return this.extractor.getBooleanValue( object );
}
+ public int getHashCode(Object object) {
+ return this.extractor.getHashCode( object );
+ }
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
public String toString() {
diff --git a/drools-core/src/main/java/org/drools/spi/ColumnExtractor.java b/drools-core/src/main/java/org/drools/spi/ColumnExtractor.java
index 5a2ad1be769..cf2f13c5e68 100644
--- a/drools-core/src/main/java/org/drools/spi/ColumnExtractor.java
+++ b/drools-core/src/main/java/org/drools/spi/ColumnExtractor.java
@@ -124,4 +124,7 @@ public Method getNativeReadMethod() {
}
}
+ public int getHashCode(Object object) {
+ return getValue( object ).hashCode();
+ }
}
\ No newline at end of file
diff --git a/drools-core/src/main/java/org/drools/spi/Extractor.java b/drools-core/src/main/java/org/drools/spi/Extractor.java
index 98d442cff4a..929c396cf82 100644
--- a/drools-core/src/main/java/org/drools/spi/Extractor.java
+++ b/drools-core/src/main/java/org/drools/spi/Extractor.java
@@ -48,5 +48,7 @@ public interface Extractor
public Class getExtractToClass();
public Method getNativeReadMethod();
+
+ public int getHashCode(Object object);
}
\ No newline at end of file
diff --git a/drools-core/src/main/java/org/drools/util/AbstractHashTable.java b/drools-core/src/main/java/org/drools/util/AbstractHashTable.java
index 3477b42d3fe..a815e2ab06d 100644
--- a/drools-core/src/main/java/org/drools/util/AbstractHashTable.java
+++ b/drools-core/src/main/java/org/drools/util/AbstractHashTable.java
@@ -415,15 +415,13 @@ public SingleIndex(final FieldIndex[] indexes,
public int hashCodeOf(final Object object) {
int hashCode = this.startResult;
- final Object value = this.extractor.getValue( object );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.extractor.getHashCode( object );
return this.comparator.rehash( hashCode );
}
public int hashCodeOf(final ReteTuple tuple) {
int hashCode = this.startResult;
- final Object value = this.declaration.getValue( tuple.get( this.declaration ).getObject() );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.declaration.getHashCode( tuple.get( this.declaration ).getObject() );
return this.comparator.rehash( hashCode );
}
@@ -479,11 +477,11 @@ public DoubleCompositeIndex(final FieldIndex[] indexes,
public int hashCodeOf(final Object object) {
int hashCode = this.startResult;
- Object value = this.index0.extractor.getValue( object );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
+ int hash = this.index0.extractor.getHashCode( object );
+ hashCode = TupleIndexHashTable.PRIME * hashCode + hash;
- value = this.index1.extractor.getValue( object );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
+ hash = this.index1.extractor.getHashCode( object );
+ hashCode = TupleIndexHashTable.PRIME * hashCode + hash;
return this.comparator.rehash( hashCode );
}
@@ -491,11 +489,9 @@ public int hashCodeOf(final Object object) {
public int hashCodeOf(final ReteTuple tuple) {
int hashCode = this.startResult;
- Object value = this.index0.declaration.getValue( tuple.get( this.index0.declaration ).getObject() );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.index0.declaration.getHashCode( tuple.get( this.index0.declaration ).getObject() );
- value = this.index1.declaration.getValue( tuple.get( this.index1.declaration ).getObject() );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.index1.declaration.getHashCode( tuple.get( this.index1.declaration ).getObject() );
return this.comparator.rehash( hashCode );
}
@@ -590,14 +586,9 @@ public TripleCompositeIndex(final FieldIndex[] indexes,
public int hashCodeOf(final Object object) {
int hashCode = this.startResult;
- Object value = this.index0.extractor.getValue( object );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
-
- value = this.index1.extractor.getValue( object );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
-
- value = this.index2.extractor.getValue( object );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.index0.extractor.getHashCode( object );;
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.index1.extractor.getHashCode( object );;
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.index2.extractor.getHashCode( object );;
return this.comparator.rehash( hashCode );
}
@@ -605,14 +596,9 @@ public int hashCodeOf(final Object object) {
public int hashCodeOf(final ReteTuple tuple) {
int hashCode = this.startResult;
- Object value = this.index0.declaration.getValue( tuple.get( this.index0.declaration ).getObject() );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
-
- value = this.index1.declaration.getValue( tuple.get( this.index1.declaration ).getObject() );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
-
- value = this.index2.declaration.getValue( tuple.get( this.index2.declaration ).getObject() );
- hashCode += TupleIndexHashTable.PRIME * hashCode + ((value == null) ? 0 : value.hashCode());
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.index0.declaration.getHashCode( tuple.get( this.index0.declaration ).getObject() );
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.index1.declaration.getHashCode( tuple.get( this.index1.declaration ).getObject() );
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.index2.declaration.getHashCode( tuple.get( this.index2.declaration ).getObject() );
return this.comparator.rehash( hashCode );
}
diff --git a/drools-core/src/test/java/org/drools/reteoo/CompositeObjectSinkAdapterTest.java b/drools-core/src/test/java/org/drools/reteoo/CompositeObjectSinkAdapterTest.java
index 7339f0b1893..30bd2498b5a 100644
--- a/drools-core/src/test/java/org/drools/reteoo/CompositeObjectSinkAdapterTest.java
+++ b/drools-core/src/test/java/org/drools/reteoo/CompositeObjectSinkAdapterTest.java
@@ -244,6 +244,10 @@ public ValueType getValueType() {
// Auto-generated method stub
return null;
}
+
+ public int getHashCode(Object object) {
+ return 0;
+ }
}
|
d6f8a7f7ee7c689abdaa85b0c4d546e562f4e838
|
hadoop
|
YARN-539. Addressed memory leak of LocalResource- objects NM when a resource localization fails. Contributed by Omkar Vinit- Joshi. svn merge --ignore-ancestry -c 1466756 ../../trunk/--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1466757 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index a60b06ea032c9..c72ab75ab0265 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -159,6 +159,9 @@ Release 2.0.5-beta - UNRELEASED
YARN-534. Change RM restart recovery to also account for AM max-attempts
configuration after the restart. (Jian He via vinodkv)
+ YARN-539. Addressed memory leak of LocalResource objects NM when a resource
+ localization fails. (Omkar Vinit Joshi via vinodkv)
+
Release 2.0.4-alpha - UNRELEASED
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
index 247406434e703..4ba2d72289e71 100644
--- a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
+++ b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
@@ -270,4 +270,11 @@
<Bug pattern="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE" />
</Match>
+ <!-- This type cast problem will never occur. -->
+ <Match>
+ <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalResourcesTrackerImpl" />
+ <Method name="handle" />
+ <Bug pattern="BC_UNCONFIRMED_CAST" />
+ </Match>
+
</FindBugsFilter>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourcesTracker.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourcesTracker.java
index 2e795e54a10e7..98ec471abf0db 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourcesTracker.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourcesTracker.java
@@ -40,8 +40,5 @@ interface LocalResourcesTracker
String getUser();
- // TODO: Remove this in favour of EventHandler.handle
- void localizationCompleted(LocalResourceRequest req, boolean success);
-
long nextUniqueNumber();
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourcesTrackerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourcesTrackerImpl.java
index 53ca9013da8d5..786b58ca5d01f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourcesTrackerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourcesTrackerImpl.java
@@ -33,6 +33,7 @@
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceEvent;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceReleaseEvent;
/**
@@ -96,13 +97,22 @@ public LocalResourcesTrackerImpl(String user, Dispatcher dispatcher,
this.conf = conf;
}
+ /*
+ * Synchronizing this method for avoiding races due to multiple ResourceEvent's
+ * coming to LocalResourcesTracker from Public/Private localizer and
+ * Resource Localization Service.
+ */
@Override
- public void handle(ResourceEvent event) {
+ public synchronized void handle(ResourceEvent event) {
LocalResourceRequest req = event.getLocalResourceRequest();
LocalizedResource rsrc = localrsrc.get(req);
switch (event.getType()) {
- case REQUEST:
case LOCALIZED:
+ if (useLocalCacheDirectoryManager) {
+ inProgressLocalResourcesMap.remove(req);
+ }
+ break;
+ case REQUEST:
if (rsrc != null && (!isResourcePresent(rsrc))) {
LOG.info("Resource " + rsrc.getLocalPath()
+ " is missing, localizing it again");
@@ -117,10 +127,24 @@ public void handle(ResourceEvent event) {
break;
case RELEASE:
if (null == rsrc) {
- LOG.info("Release unknown rsrc null (discard)");
+ // The container sent a release event on a resource which
+ // 1) Failed
+ // 2) Removed for some reason (ex. disk is no longer accessible)
+ ResourceReleaseEvent relEvent = (ResourceReleaseEvent) event;
+ LOG.info("Container " + relEvent.getContainer()
+ + " sent RELEASE event on a resource request " + req
+ + " not present in cache.");
return;
}
break;
+ case LOCALIZATION_FAILED:
+ decrementFileCountForLocalCacheDirectory(req, null);
+ /*
+ * If resource localization fails then Localized resource will be
+ * removed from local cache.
+ */
+ localrsrc.remove(req);
+ break;
}
rsrc.handle(event);
}
@@ -279,18 +303,6 @@ public Iterator<LocalizedResource> iterator() {
}
}
- @Override
- public void localizationCompleted(LocalResourceRequest req,
- boolean success) {
- if (useLocalCacheDirectoryManager) {
- if (!success) {
- decrementFileCountForLocalCacheDirectory(req, null);
- } else {
- inProgressLocalResourcesMap.remove(req);
- }
- }
- }
-
@Override
public long nextUniqueNumber() {
return uniqueNumberGenerator.incrementAndGet();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalizedResource.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalizedResource.java
index 00709fd91c290..f0cd87b573a41 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalizedResource.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalizedResource.java
@@ -32,10 +32,12 @@
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerResourceFailedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerResourceLocalizedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerResourceRequestEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceEventType;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceFailedLocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceLocalizedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceReleaseEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRequestEvent;
@@ -89,6 +91,8 @@ ResourceEventType.LOCALIZED, new FetchSuccessTransition())
.addTransition(ResourceState.DOWNLOADING,
EnumSet.of(ResourceState.DOWNLOADING, ResourceState.INIT),
ResourceEventType.RELEASE, new ReleasePendingTransition())
+ .addTransition(ResourceState.DOWNLOADING, ResourceState.FAILED,
+ ResourceEventType.LOCALIZATION_FAILED, new FetchFailedTransition())
// From LOCALIZED (ref >= 0, on disk)
.addTransition(ResourceState.LOCALIZED, ResourceState.LOCALIZED,
@@ -126,12 +130,14 @@ public String toString() {
}
private void release(ContainerId container) {
- if (!ref.remove(container)) {
- LOG.info("Attempt to release claim on " + this +
- " from unregistered container " + container);
- assert false; // TODO: FIX
+ if (ref.remove(container)) {
+ // updating the timestamp only in case of success.
+ timestamp.set(currentTime());
+ } else {
+ LOG.info("Container " + container
+ + " doesn't exist in the container list of the Resource " + this
+ + " to which it sent RELEASE event");
}
- timestamp.set(currentTime());
}
private long currentTime() {
@@ -250,6 +256,25 @@ public void transition(LocalizedResource rsrc, ResourceEvent event) {
}
}
+ /**
+ * Resource localization failed, notify waiting containers.
+ */
+ @SuppressWarnings("unchecked")
+ private static class FetchFailedTransition extends ResourceTransition {
+ @Override
+ public void transition(LocalizedResource rsrc, ResourceEvent event) {
+ ResourceFailedLocalizationEvent failedEvent =
+ (ResourceFailedLocalizationEvent) event;
+ Queue<ContainerId> containers = rsrc.ref;
+ Throwable failureCause = failedEvent.getCause();
+ for (ContainerId container : containers) {
+ rsrc.dispatcher.getEventHandler().handle(
+ new ContainerResourceFailedEvent(container, failedEvent
+ .getLocalResourceRequest(), failureCause));
+ }
+ }
+ }
+
/**
* Resource already localized, notify immediately.
*/
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
index 5058cb2cad9ea..7b9873a1f4578 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
@@ -84,7 +84,6 @@
import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerAction;
import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerHeartbeatResponse;
import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerStatus;
-import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.ResourceStatusType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType;
@@ -101,6 +100,7 @@
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerResourceRequestEvent;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceFailedLocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceLocalizedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceReleaseEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRequestEvent;
@@ -683,7 +683,6 @@ public void addResource(LocalizerResourceRequestEvent request) {
}
@Override
- @SuppressWarnings("unchecked") // dispatcher not typed
public void run() {
try {
// TODO shutdown, better error handling esp. DU
@@ -699,10 +698,8 @@ public void run() {
return;
}
LocalResourceRequest key = assoc.getResource().getRequest();
- assoc.getResource().handle(
- new ResourceLocalizedEvent(key,
- local, FileUtil.getDU(new File(local.toUri()))));
- publicRsrc.localizationCompleted(key, true);
+ publicRsrc.handle(new ResourceLocalizedEvent(key, local, FileUtil
+ .getDU(new File(local.toUri()))));
synchronized (attempts) {
attempts.remove(key);
}
@@ -710,13 +707,10 @@ public void run() {
LOG.info("Failed to download rsrc " + assoc.getResource(),
e.getCause());
LocalResourceRequest req = assoc.getResource().getRequest();
- dispatcher.getEventHandler().handle(
- new ContainerResourceFailedEvent(
- assoc.getContext().getContainerId(),
- req, e.getCause()));
- publicRsrc.localizationCompleted(req, false);
- List<LocalizerResourceRequestEvent> reqs;
+ publicRsrc.handle(new ResourceFailedLocalizationEvent(req, e
+ .getCause()));
synchronized (attempts) {
+ List<LocalizerResourceRequestEvent> reqs;
reqs = attempts.get(req);
if (null == reqs) {
LOG.error("Missing pending list for " + req);
@@ -724,13 +718,6 @@ public void run() {
}
attempts.remove(req);
}
- // let the other containers know about the localization failure
- for (LocalizerResourceRequestEvent reqEvent : reqs) {
- dispatcher.getEventHandler().handle(
- new ContainerResourceFailedEvent(
- reqEvent.getContext().getContainerId(),
- reqEvent.getResource().getRequest(), e.getCause()));
- }
} catch (CancellationException e) {
// ignore; shutting down
}
@@ -810,13 +797,14 @@ private LocalResource findNextResource() {
return null;
}
- // TODO this sucks. Fix it later
- @SuppressWarnings("unchecked") // dispatcher not typed
LocalizerHeartbeatResponse update(
List<LocalResourceStatus> remoteResourceStatuses) {
LocalizerHeartbeatResponse response =
recordFactory.newRecordInstance(LocalizerHeartbeatResponse.class);
+ String user = context.getUser();
+ ApplicationId applicationId =
+ context.getContainerId().getApplicationAttemptId().getApplicationId();
// The localizer has just spawned. Start giving it resources for
// remote-fetching.
if (remoteResourceStatuses.isEmpty()) {
@@ -847,6 +835,11 @@ LocalizerHeartbeatResponse update(
}
ArrayList<ResourceLocalizationSpec> rsrcs =
new ArrayList<ResourceLocalizationSpec>();
+ /*
+ * TODO : It doesn't support multiple downloads per ContainerLocalizer
+ * at the same time. We need to think whether we should support this.
+ */
+
for (LocalResourceStatus stat : remoteResourceStatuses) {
LocalResource rsrc = stat.getResource();
LocalResourceRequest req = null;
@@ -865,11 +858,10 @@ LocalizerHeartbeatResponse update(
case FETCH_SUCCESS:
// notify resource
try {
- assoc.getResource().handle(
- new ResourceLocalizedEvent(req,
- ConverterUtils.getPathFromYarnURL(stat.getLocalPath()),
- stat.getLocalSize()));
- localizationCompleted(stat);
+ getLocalResourcesTracker(req.getVisibility(), user, applicationId)
+ .handle(
+ new ResourceLocalizedEvent(req, ConverterUtils
+ .getPathFromYarnURL(stat.getLocalPath()), stat.getLocalSize()));
} catch (URISyntaxException e) { }
if (pending.isEmpty()) {
// TODO: Synchronization
@@ -899,19 +891,16 @@ LocalizerHeartbeatResponse update(
LOG.info("DEBUG: FAILED " + req, stat.getException());
assoc.getResource().unlock();
response.setLocalizerAction(LocalizerAction.DIE);
- localizationCompleted(stat);
- // TODO: Why is this event going directly to the container. Why not
- // the resource itself? What happens to the resource? Is it removed?
- dispatcher.getEventHandler().handle(
- new ContainerResourceFailedEvent(context.getContainerId(),
- req, stat.getException()));
+ getLocalResourcesTracker(req.getVisibility(), user, applicationId)
+ .handle(
+ new ResourceFailedLocalizationEvent(req, stat.getException()));
break;
default:
LOG.info("Unknown status: " + stat.getStatus());
response.setLocalizerAction(LocalizerAction.DIE);
- dispatcher.getEventHandler().handle(
- new ContainerResourceFailedEvent(context.getContainerId(),
- req, stat.getException()));
+ getLocalResourcesTracker(req.getVisibility(), user, applicationId)
+ .handle(
+ new ResourceFailedLocalizationEvent(req, stat.getException()));
break;
}
}
@@ -919,27 +908,6 @@ LocalizerHeartbeatResponse update(
return response;
}
- private void localizationCompleted(LocalResourceStatus stat) {
- try {
- LocalResource rsrc = stat.getResource();
- LocalResourceRequest key = new LocalResourceRequest(rsrc);
- String user = context.getUser();
- ApplicationId appId =
- context.getContainerId().getApplicationAttemptId()
- .getApplicationId();
- LocalResourceVisibility vis = rsrc.getVisibility();
- LocalResourcesTracker tracker =
- getLocalResourcesTracker(vis, user, appId);
- if (stat.getStatus() == ResourceStatusType.FETCH_SUCCESS) {
- tracker.localizationCompleted(key, true);
- } else {
- tracker.localizationCompleted(key, false);
- }
- } catch (URISyntaxException e) {
- LOG.error("Invalid resource URL specified", e);
- }
- }
-
private Path getPathForLocalization(LocalResource rsrc) throws IOException,
URISyntaxException {
String user = context.getUser();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceState.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceState.java
index 751f60e0af172..75c8ad7663cf3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceState.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceState.java
@@ -20,5 +20,6 @@
enum ResourceState {
INIT,
DOWNLOADING,
- LOCALIZED
+ LOCALIZED,
+ FAILED
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/ResourceEventType.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/ResourceEventType.java
index d68a1b6d39134..e657c0acf3c62 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/ResourceEventType.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/ResourceEventType.java
@@ -29,5 +29,7 @@ public enum ResourceEventType {
/** See {@link ResourceLocalizedEvent} */
LOCALIZED,
/** See {@link ResourceReleaseEvent} */
- RELEASE
+ RELEASE,
+ /** See {@link ResourceFailedLocalizationEvent} */
+ LOCALIZATION_FAILED
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/ResourceFailedLocalizationEvent.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/ResourceFailedLocalizationEvent.java
new file mode 100644
index 0000000000000..79b28bac9088b
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/ResourceFailedLocalizationEvent.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event;
+
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalResourceRequest;
+
+/**
+ * This event is sent by the localizer in case resource localization fails for
+ * the requested resource.
+ */
+public class ResourceFailedLocalizationEvent extends ResourceEvent {
+
+ private Throwable cause;
+
+ public ResourceFailedLocalizationEvent(LocalResourceRequest rsrc,
+ Throwable cause) {
+ super(rsrc, ResourceEventType.LOCALIZATION_FAILED);
+ this.cause = cause;
+ }
+
+ public Throwable getCause() {
+ return cause;
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResourcesTrackerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResourcesTrackerImpl.java
index a8bbdb035211f..b2caba02e81ca 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResourcesTrackerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResourcesTrackerImpl.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer;
import static org.mockito.Mockito.any;
+import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@@ -41,11 +42,15 @@
import org.apache.hadoop.yarn.event.DrainDispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEventType;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerResourceFailedEvent;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerResourceLocalizedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerResourceRequestEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceEvent;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceFailedLocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceLocalizedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceReleaseEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRequestEvent;
@@ -224,6 +229,142 @@ public void testConsistency() {
}
}
+ @Test(timeout = 1000)
+ @SuppressWarnings("unchecked")
+ public void testLocalResourceCache() {
+ String user = "testuser";
+ DrainDispatcher dispatcher = null;
+ try {
+ Configuration conf = new Configuration();
+ dispatcher = createDispatcher(conf);
+
+ EventHandler<LocalizerEvent> localizerEventHandler =
+ mock(EventHandler.class);
+ EventHandler<ContainerEvent> containerEventHandler =
+ mock(EventHandler.class);
+
+ // Registering event handlers.
+ dispatcher.register(LocalizerEventType.class, localizerEventHandler);
+ dispatcher.register(ContainerEventType.class, containerEventHandler);
+
+ ConcurrentMap<LocalResourceRequest, LocalizedResource> localrsrc =
+ new ConcurrentHashMap<LocalResourceRequest, LocalizedResource>();
+ LocalResourcesTracker tracker =
+ new LocalResourcesTrackerImpl(user, dispatcher, localrsrc, true, conf);
+
+ LocalResourceRequest lr =
+ createLocalResourceRequest(user, 1, 1, LocalResourceVisibility.PUBLIC);
+
+ // Creating 2 containers for same application which will be requesting
+ // same local resource.
+ // Container 1 requesting local resource.
+ ContainerId cId1 = BuilderUtils.newContainerId(1, 1, 1, 1);
+ LocalizerContext lc1 = new LocalizerContext(user, cId1, null);
+ ResourceEvent reqEvent1 =
+ new ResourceRequestEvent(lr, LocalResourceVisibility.PRIVATE, lc1);
+
+ // No resource request is initially present in local cache
+ Assert.assertEquals(0, localrsrc.size());
+
+ // Container-1 requesting local resource.
+ tracker.handle(reqEvent1);
+
+ // New localized Resource should have been added to local resource map
+ // and the requesting container will be added to its waiting queue.
+ Assert.assertEquals(1, localrsrc.size());
+ Assert.assertTrue(localrsrc.containsKey(lr));
+ Assert.assertEquals(1, localrsrc.get(lr).getRefCount());
+ Assert.assertTrue(localrsrc.get(lr).ref.contains(cId1));
+ Assert.assertEquals(ResourceState.DOWNLOADING, localrsrc.get(lr)
+ .getState());
+
+ // Container 2 requesting the resource
+ ContainerId cId2 = BuilderUtils.newContainerId(1, 1, 1, 2);
+ LocalizerContext lc2 = new LocalizerContext(user, cId2, null);
+ ResourceEvent reqEvent2 =
+ new ResourceRequestEvent(lr, LocalResourceVisibility.PRIVATE, lc2);
+ tracker.handle(reqEvent2);
+
+ // Container 2 should have been added to the waiting queue of the local
+ // resource
+ Assert.assertEquals(2, localrsrc.get(lr).getRefCount());
+ Assert.assertTrue(localrsrc.get(lr).ref.contains(cId2));
+
+ // Failing resource localization
+ ResourceEvent resourceFailedEvent =
+ new ResourceFailedLocalizationEvent(lr, new Exception("test"));
+
+ // Backing up the resource to track its state change as it will be
+ // removed after the failed event.
+ LocalizedResource localizedResource = localrsrc.get(lr);
+
+ tracker.handle(resourceFailedEvent);
+
+ // After receiving failed resource event; all waiting containers will be
+ // notified with Container Resource Failed Event.
+ Assert.assertEquals(0, localrsrc.size());
+ verify(containerEventHandler, times(2)).handle(
+ isA(ContainerResourceFailedEvent.class));
+ Assert.assertEquals(ResourceState.FAILED, localizedResource.getState());
+
+ // Container 1 trying to release the resource (This resource is already
+ // deleted from the cache. This call should return silently without
+ // exception.
+ ResourceReleaseEvent relEvent1 = new ResourceReleaseEvent(lr, cId1);
+ tracker.handle(relEvent1);
+
+ // Container-3 now requests for the same resource. This request call
+ // is coming prior to Container-2's release call.
+ ContainerId cId3 = BuilderUtils.newContainerId(1, 1, 1, 3);
+ LocalizerContext lc3 = new LocalizerContext(user, cId3, null);
+ ResourceEvent reqEvent3 =
+ new ResourceRequestEvent(lr, LocalResourceVisibility.PRIVATE, lc3);
+ tracker.handle(reqEvent3);
+
+ // Local resource cache now should have the requested resource and the
+ // number of waiting containers should be 1.
+ Assert.assertEquals(1, localrsrc.size());
+ Assert.assertTrue(localrsrc.containsKey(lr));
+ Assert.assertEquals(1, localrsrc.get(lr).getRefCount());
+ Assert.assertTrue(localrsrc.get(lr).ref.contains(cId3));
+
+ // Container-2 Releases the resource
+ ResourceReleaseEvent relEvent2 = new ResourceReleaseEvent(lr, cId2);
+ tracker.handle(relEvent2);
+
+ // Making sure that there is no change in the cache after the release.
+ Assert.assertEquals(1, localrsrc.size());
+ Assert.assertTrue(localrsrc.containsKey(lr));
+ Assert.assertEquals(1, localrsrc.get(lr).getRefCount());
+ Assert.assertTrue(localrsrc.get(lr).ref.contains(cId3));
+
+ // Sending ResourceLocalizedEvent to tracker. In turn resource should
+ // send Container Resource Localized Event to waiting containers.
+ Path localizedPath = new Path("/tmp/file1");
+ ResourceLocalizedEvent localizedEvent =
+ new ResourceLocalizedEvent(lr, localizedPath, 123L);
+ tracker.handle(localizedEvent);
+
+ // Verifying ContainerResourceLocalizedEvent .
+ verify(containerEventHandler, times(1)).handle(
+ isA(ContainerResourceLocalizedEvent.class));
+ Assert.assertEquals(ResourceState.LOCALIZED, localrsrc.get(lr)
+ .getState());
+ Assert.assertEquals(1, localrsrc.get(lr).getRefCount());
+
+ // Container-3 releasing the resource.
+ ResourceReleaseEvent relEvent3 = new ResourceReleaseEvent(lr, cId3);
+ tracker.handle(relEvent3);
+
+ Assert.assertEquals(0, localrsrc.get(lr).getRefCount());
+
+ } finally {
+ if (dispatcher != null) {
+ dispatcher.stop();
+ }
+ }
+ }
+
@Test(timeout = 100000)
@SuppressWarnings("unchecked")
public void testHierarchicalLocalCacheDirectories() {
@@ -266,19 +407,25 @@ public void testHierarchicalLocalCacheDirectories() {
// Simulate the process of localization of lr1
Path hierarchicalPath1 = tracker.getPathForLocalization(lr1, localDir);
// Simulate lr1 getting localized
- ResourceLocalizedEvent rle =
+ ResourceLocalizedEvent rle1 =
new ResourceLocalizedEvent(lr1,
new Path(hierarchicalPath1.toUri().toString() +
Path.SEPARATOR + "file1"), 120);
- tracker.handle(rle);
+ tracker.handle(rle1);
// Localization successful.
- tracker.localizationCompleted(lr1, true);
LocalResourceRequest lr2 = createLocalResourceRequest(user, 3, 3,
LocalResourceVisibility.PUBLIC);
+ // Container 1 requests lr2 to be localized.
+ ResourceEvent reqEvent2 =
+ new ResourceRequestEvent(lr2, LocalResourceVisibility.PUBLIC, lc1);
+ tracker.handle(reqEvent2);
+
Path hierarchicalPath2 = tracker.getPathForLocalization(lr2, localDir);
// localization failed.
- tracker.localizationCompleted(lr2, false);
+ ResourceFailedLocalizationEvent rfe2 =
+ new ResourceFailedLocalizationEvent(lr2, new Exception("Test"));
+ tracker.handle(rfe2);
/*
* The path returned for two localization should be different because we
@@ -292,7 +439,11 @@ public void testHierarchicalLocalCacheDirectories() {
LocalResourceVisibility.PUBLIC, lc1);
tracker.handle(reqEvent3);
Path hierarchicalPath3 = tracker.getPathForLocalization(lr3, localDir);
- tracker.localizationCompleted(lr3, true);
+ // localization successful
+ ResourceLocalizedEvent rle3 =
+ new ResourceLocalizedEvent(lr3, new Path(hierarchicalPath3.toUri()
+ .toString() + Path.SEPARATOR + "file3"), 120);
+ tracker.handle(rle3);
// Verifying that path created is inside the subdirectory
Assert.assertEquals(hierarchicalPath3.toUri().toString(),
|
7fff3ab5f629e07a7b6d8ff56fc32dd68c9766f3
|
hbase
|
HADOOP-2308 null regioninfo breaks meta scanner--git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@599875 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 91a4b672d962..5a21198ebed6 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -41,6 +41,7 @@ Trunk (unreleased changes)
(Bryan Duxbury via Stack)
HADOOP-2295 Fix assigning a region to multiple servers
HADOOP-2234 TableInputFormat erroneously aggregates map values
+ HADOOP-2308 null regioninfo breaks meta scanner
IMPROVEMENTS
HADOOP-2401 Add convenience put method that takes writable
diff --git a/src/java/org/apache/hadoop/hbase/HMaster.java b/src/java/org/apache/hadoop/hbase/HMaster.java
index d5424d36a54b..08b8cd3989e1 100644
--- a/src/java/org/apache/hadoop/hbase/HMaster.java
+++ b/src/java/org/apache/hadoop/hbase/HMaster.java
@@ -229,14 +229,19 @@ protected void scanRegion(final MetaRegion region) throws IOException {
if (values == null || values.size() == 0) {
break;
}
-
for (Map.Entry<Writable, Writable> e: values.entrySet()) {
HStoreKey key = (HStoreKey) e.getKey();
results.put(key.getColumn(),
((ImmutableBytesWritable) e.getValue()).get());
}
- HRegionInfo info = (HRegionInfo) Writables.getWritable(
- results.get(COL_REGIONINFO), new HRegionInfo());
+ byte [] bytes = results.get(COL_REGIONINFO);
+ if (bytes == null) {
+ LOG.warn(COL_REGIONINFO.toString() + " is empty; has keys: " +
+ values.keySet().toString());
+ continue;
+ }
+ HRegionInfo info = (HRegionInfo) Writables.getWritable(bytes,
+ new HRegionInfo());
String serverName = Writables.bytesToString(results.get(COL_SERVER));
long startCode = Writables.bytesToLong(results.get(COL_STARTCODE));
if (LOG.isDebugEnabled()) {
|
19dea8c85287e1462e4719e5710ed1951d3cde6a
|
orientdb
|
Fixed issue about parenthesis in SQL query, added- one more test to the suite for it--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilter.java b/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilter.java
index e2871394021..bcf8747740d 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilter.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilter.java
@@ -174,6 +174,10 @@ protected OSQLFilterCondition extractCondition() {
}
private OQueryOperator extractConditionOperator() {
+ if (currentPos >= text.length())
+ // END OF PARSING: JUST RETURN
+ return null;
+
String word;
word = nextWord(true, " 0123456789'\"");
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilterCondition.java b/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilterCondition.java
index e0d29a4c0bf..fc2bfecad95 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilterCondition.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilterCondition.java
@@ -55,6 +55,10 @@ public Object evaluate(final ORecordSchemaAware<?> iRecord) {
r = convertedValues[1];
}
+ if (operator == null)
+ // UNITARY OPERATOR: JUST RETURN LEFT RESULT
+ return l;
+
return operator.evaluateRecord(iRecord, this, l, r);
}
@@ -137,11 +141,13 @@ public String toString() {
buffer.append('(');
buffer.append(left);
- buffer.append(' ');
- buffer.append(operator);
- buffer.append(' ');
- buffer.append(right);
- buffer.append(')');
+ if (operator != null) {
+ buffer.append(' ');
+ buffer.append(operator);
+ buffer.append(' ');
+ buffer.append(right);
+ buffer.append(')');
+ }
return buffer.toString();
}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java
index 45b1c3d64ed..b54a80be89d 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java
@@ -54,6 +54,24 @@ public void queryNoWhere() {
database.close();
}
+
+ @Test
+ public void queryParentesisAsRight() {
+ database.open("admin", "admin");
+
+ List<ODocument> result = database.command(
+ new OSQLSynchQuery<ODocument>(
+ " select from Profile where name = 'Giuseppe' and ( name <> 'Napoleone' and nick is not null ) "))
+ .execute();
+
+ Assert.assertTrue(result.size() != 0);
+
+ for (ODocument d : result) {
+ Assert.assertEquals(d.getRecordType(), ODocument.RECORD_TYPE);
+ }
+
+ database.close();
+ }
@Test
public void queryTwoParentesisConditions() {
|
f57a8e904c824145a350c20fee1240503c7ca3b5
|
restlet-framework-java
|
Fixed potential NPE.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet/src/org/restlet/Client.java b/modules/org.restlet/src/org/restlet/Client.java
index 4c7a33a35a..2502fe4f87 100644
--- a/modules/org.restlet/src/org/restlet/Client.java
+++ b/modules/org.restlet/src/org/restlet/Client.java
@@ -99,7 +99,7 @@ public Client(Context context, List<Protocol> protocols, String helperClass) {
this.helper = null;
}
- if (context != null) {
+ if (context != null && this.helper != null) {
context.getAttributes().put("org.restlet.engine.helper",
this.helper);
}
diff --git a/modules/org.restlet/src/org/restlet/Server.java b/modules/org.restlet/src/org/restlet/Server.java
index dde2316ee5..007b75cb7d 100644
--- a/modules/org.restlet/src/org/restlet/Server.java
+++ b/modules/org.restlet/src/org/restlet/Server.java
@@ -136,7 +136,7 @@ public Server(Context context, List<Protocol> protocols, String address,
this.helper = null;
}
- if (context != null) {
+ if (context != null && this.helper != null) {
context.getAttributes().put("org.restlet.engine.helper",
this.helper);
}
|
e98541030c5e0aadfdb194dbb55254f404219600
|
orientdb
|
Huge refactoring on GraphDB: - changed class- names in vertex and edge - Optimized memory consumption by removing nested- records - Optimized speed in ORecord.equals() and hashCode(): now avoid field- checks (experimental)--
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java
index cd054ea61c5..cc4674f4842 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java
@@ -185,6 +185,8 @@ public ORecordAbstract<T> save() {
OSerializationThreadLocal.INSTANCE.get().clear();
_database.save(this);
+
+ OSerializationThreadLocal.INSTANCE.get().clear();
return this;
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordVirtualAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordVirtualAbstract.java
index 91a40883f02..6ea13c242fc 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordVirtualAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordVirtualAbstract.java
@@ -15,10 +15,8 @@
*/
package com.orientechnologies.orient.core.record;
-import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
-import java.util.Map.Entry;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.metadata.schema.OType;
@@ -73,22 +71,22 @@ public ORecordSchemaAwareAbstract<T> reset() {
@Override
public int hashCode() {
int result = super.hashCode();
-
- if (!_recordId.isValid() && _fieldValues != null)
- for (Entry<String, T> field : _fieldValues.entrySet()) {
- if (field.getKey() != null)
- result += field.getKey().hashCode();
-
- if (field.getValue() != null)
- if (field.getValue() instanceof ORecord<?>)
- // AVOID TO GET THE HASH-CODE OF THE VALUE TO AVOID STACK OVERFLOW FOR CIRCULAR REFS
- result += 31 * ((ORecord<T>) field.getValue()).getIdentity().hashCode();
- else if (field.getValue() instanceof Collection<?>)
- // AVOID TO GET THE HASH-CODE OF THE VALUE TO AVOID STACK OVERFLOW FOR CIRCULAR REFS
- result += ((Collection<?>) field.getValue()).size() * 31;
- else
- result += field.getValue().hashCode();
- }
+ //
+ // if (!_recordId.isValid() && _fieldValues != null)
+ // for (Entry<String, T> field : _fieldValues.entrySet()) {
+ // if (field.getKey() != null)
+ // result += field.getKey().hashCode();
+ //
+ // if (field.getValue() != null)
+ // if (field.getValue() instanceof ORecord<?>)
+ // // AVOID TO GET THE HASH-CODE OF THE VALUE TO AVOID STACK OVERFLOW FOR CIRCULAR REFS
+ // result += 31 * ((ORecord<T>) field.getValue()).getIdentity().hashCode();
+ // else if (field.getValue() instanceof Collection<?>)
+ // // AVOID TO GET THE HASH-CODE OF THE VALUE TO AVOID STACK OVERFLOW FOR CIRCULAR REFS
+ // result += ((Collection<?>) field.getValue()).size() * 31;
+ // else
+ // result += field.getValue().hashCode();
+ // }
return result;
}
@@ -99,37 +97,39 @@ public boolean equals(Object obj) {
return false;
if (!_recordId.isValid()) {
- final ORecordVirtualAbstract<?> other = (ORecordVirtualAbstract<?>) obj;
-
- // NO PERSISTENT OBJECT: COMPARE EACH FIELDS
- if (_fieldValues == null || other._fieldValues == null)
- // CAN'T COMPARE FIELDS: RETURN FALSE
- return false;
-
- if (_fieldValues.size() != other._fieldValues.size())
- // FIELD SIZES ARE DIFFERENTS
- return false;
-
- String k;
- Object v;
- Object otherV;
- for (Entry<String, T> field : _fieldValues.entrySet()) {
- k = field.getKey();
- if (k != null && !other.containsField(k))
- // FIELD NOT PRESENT IN THE OTHER RECORD
- return false;
-
- v = _fieldValues.get(k);
- otherV = other._fieldValues.get(k);
- if (v == null && otherV == null)
- continue;
-
- if (v == null && otherV != null || otherV == null && v != null)
- return false;
-
- if (!v.equals(otherV))
- return false;
- }
+ //
+ // final ORecordVirtualAbstract<?> other = (ORecordVirtualAbstract<?>) obj;
+ //
+ // // NO PERSISTENT OBJECT: COMPARE EACH FIELDS
+ // if (_fieldValues == null || other._fieldValues == null)
+ // // CAN'T COMPARE FIELDS: RETURN FALSE
+ // return false;
+ //
+ // if (_fieldValues.size() != other._fieldValues.size())
+ // // FIELD SIZES ARE DIFFERENTS
+ // return false;
+ //
+ // String k;
+ // Object v;
+ // Object otherV;
+ // for (Entry<String, T> field : _fieldValues.entrySet()) {
+ // k = field.getKey();
+ // if (k != null && !other.containsField(k))
+ // // FIELD NOT PRESENT IN THE OTHER RECORD
+ // return false;
+ //
+ // v = _fieldValues.get(k);
+ // otherV = other._fieldValues.get(k);
+ // if (v == null && otherV == null)
+ // continue;
+ //
+ // if (v == null && otherV != null || otherV == null && v != null)
+ // return false;
+ //
+ // if (!v.equals(otherV))
+ // return false;
+ // }
+ return false;
}
return true;
diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/OSerializationThreadLocal.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/OSerializationThreadLocal.java
index 4faafab4991..b37dfb62aaa 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/OSerializationThreadLocal.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/OSerializationThreadLocal.java
@@ -15,7 +15,7 @@
*/
package com.orientechnologies.orient.core.serialization.serializer.record;
-import java.util.HashMap;
+import java.util.IdentityHashMap;
import java.util.Map;
import com.orientechnologies.orient.core.id.ORecordId;
@@ -26,6 +26,6 @@ public class OSerializationThreadLocal extends ThreadLocal<Map<ORecordInternal<?
@Override
protected Map<ORecordInternal<?>, ORecordId> initialValue() {
- return new HashMap<ORecordInternal<?>, ORecordId>();
+ return new IdentityHashMap<ORecordInternal<?>, ORecordId>();
}
}
\ No newline at end of file
diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java
index 3c99ffa202c..d172294974b 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/string/ORecordSerializerSchemaAware2CSV.java
@@ -202,6 +202,8 @@ protected String toString(ORecordInternal<?> iRecord, final String iFormat, fina
i++;
}
+
+ iMarshalledRecords.remove(record);
return buffer.toString();
}
|
e17f046915cce47bd95b4b8f4b197c8fb3d76481
|
intellij-community
|
bad linking of Xerces messages--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/xml/impl/src/com/intellij/xml/actions/ValidateXmlActionHandler.java b/xml/impl/src/com/intellij/xml/actions/ValidateXmlActionHandler.java
index 3355556caa120..d895468c85372 100644
--- a/xml/impl/src/com/intellij/xml/actions/ValidateXmlActionHandler.java
+++ b/xml/impl/src/com/intellij/xml/actions/ValidateXmlActionHandler.java
@@ -83,8 +83,14 @@ public void setErrorReporter(ErrorReporter errorReporter) {
myErrorReporter = errorReporter;
}
- private VirtualFile getFile(String publicId) {
- if (publicId == null) return myFile.getVirtualFile();
+ public VirtualFile getFile(String publicId, String systemId) {
+ if (publicId == null) {
+ if (systemId != null) {
+ final String path = myXmlResourceResolver.getPathByPublicId(systemId);
+ if (path != null) return VfsUtil.findRelativeFile(path,null);
+ }
+ return myFile.getVirtualFile();
+ }
final String path = myXmlResourceResolver.getPathByPublicId(publicId);
if (path != null) return VfsUtil.findRelativeFile(path,null);
return null;
@@ -132,7 +138,7 @@ public boolean isUniqueProblem(final SAXParseException e) {
private String buildMessageString(SAXParseException ex) {
String msg = "(" + ex.getLineNumber() + ":" + ex.getColumnNumber() + ") " + ex.getMessage();
- final VirtualFile file = getFile(ex.getPublicId());
+ final VirtualFile file = getFile(ex.getPublicId(), ex.getSystemId());
if ( file != null && !file.equals(myFile.getVirtualFile())) {
msg = file.getName() + ":" + msg;
@@ -257,7 +263,7 @@ public void run() {
myErrorsView.addMessage(
warning ? MessageCategory.WARNING : MessageCategory.ERROR,
new String[]{ex.getLocalizedMessage()},
- getFile(ex.getPublicId()),
+ getFile(ex.getPublicId(), ex.getSystemId()),
ex.getLineNumber() - 1 ,
ex.getColumnNumber() - 1, null);
}
diff --git a/xml/impl/src/com/intellij/xml/impl/ExternalDocumentValidator.java b/xml/impl/src/com/intellij/xml/impl/ExternalDocumentValidator.java
index 345ca3e1c2c50..41bcd894c1f01 100644
--- a/xml/impl/src/com/intellij/xml/impl/ExternalDocumentValidator.java
+++ b/xml/impl/src/com/intellij/xml/impl/ExternalDocumentValidator.java
@@ -12,6 +12,7 @@
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
+import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.profile.codeInspection.InspectionProjectProfileManager;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
@@ -72,7 +73,7 @@ private static class ValidationInfo {
private WeakReference<List<ValidationInfo>> myInfos; // last jaxp validation result
private void runJaxpValidation(final XmlElement element, Validator.ValidationHost host) {
- PsiFile file = element.getContainingFile();
+ final PsiFile file = element.getContainingFile();
if (myFile == file &&
file != null &&
@@ -116,6 +117,11 @@ public void run() {
return;
}
+ final VirtualFile errorFile = myHandler.getFile(e.getPublicId(), e.getSystemId());
+ if (errorFile != file.getVirtualFile() && errorFile != null) {
+ return; // error in attached schema
+ }
+
if (document.getLineCount() < e.getLineNumber() || e.getLineNumber() <= 0) {
return;
}
|
55149154710b8bd1825442c308fb9b4b76054a63
|
camel
|
[CAMEL-1289] HeaderFilterStrategy - move from- Component to Endpoint (for JHC component)--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@743889 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/components/camel-jhc/src/main/java/org/apache/camel/component/jhc/JhcComponent.java b/components/camel-jhc/src/main/java/org/apache/camel/component/jhc/JhcComponent.java
index c96d3aa2e2123..20234e7ead3b4 100644
--- a/components/camel-jhc/src/main/java/org/apache/camel/component/jhc/JhcComponent.java
+++ b/components/camel-jhc/src/main/java/org/apache/camel/component/jhc/JhcComponent.java
@@ -20,21 +20,17 @@
import java.util.Map;
import org.apache.camel.Endpoint;
-import org.apache.camel.HeaderFilterStrategyAware;
import org.apache.camel.impl.DefaultComponent;
-import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.params.HttpProtocolParams;
-public class JhcComponent extends DefaultComponent implements HeaderFilterStrategyAware {
+public class JhcComponent extends DefaultComponent {
private HttpParams params;
- private HeaderFilterStrategy headerFilterStrategy;
public JhcComponent() {
- setHeaderFilterStrategy(new JhcHeaderFilterStrategy());
params = new BasicHttpParams()
.setIntParameter(HttpConnectionParams.SO_TIMEOUT, 5000)
@@ -57,11 +53,4 @@ protected Endpoint createEndpoint(String uri, String remaining, Map parameters)
return new JhcEndpoint(uri, this, new URI(uri.substring(uri.indexOf(':') + 1)));
}
- public HeaderFilterStrategy getHeaderFilterStrategy() {
- return headerFilterStrategy;
- }
-
- public void setHeaderFilterStrategy(HeaderFilterStrategy strategy) {
- headerFilterStrategy = strategy;
- }
}
diff --git a/components/camel-jhc/src/main/java/org/apache/camel/component/jhc/JhcEndpoint.java b/components/camel-jhc/src/main/java/org/apache/camel/component/jhc/JhcEndpoint.java
index 36131122d0836..6b9677b0a470b 100644
--- a/components/camel-jhc/src/main/java/org/apache/camel/component/jhc/JhcEndpoint.java
+++ b/components/camel-jhc/src/main/java/org/apache/camel/component/jhc/JhcEndpoint.java
@@ -19,7 +19,6 @@
import java.net.URI;
import org.apache.camel.Consumer;
-import org.apache.camel.HeaderFilterStrategyAware;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.impl.DefaultEndpoint;
@@ -37,6 +36,7 @@ public class JhcEndpoint extends DefaultEndpoint {
private HttpParams params;
private URI httpUri;
+ private HeaderFilterStrategy headerFilterStrategy;
public JhcEndpoint(String endpointUri, JhcComponent component, URI httpUri) {
super(endpointUri, component);
@@ -101,11 +101,12 @@ public Consumer createConsumer(Processor processor) throws Exception {
return new JhcConsumer(this, processor);
}
+ public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) {
+ this.headerFilterStrategy = headerFilterStrategy;
+ }
+
public HeaderFilterStrategy getHeaderFilterStrategy() {
- if (getComponent() instanceof HeaderFilterStrategyAware) {
- return ((HeaderFilterStrategyAware)getComponent()).getHeaderFilterStrategy();
- } else {
- return new JhcHeaderFilterStrategy();
- }
+ return headerFilterStrategy;
}
+
}
|
882289b06e9f2adebd916cf8d02980327c6f9614
|
spring-framework
|
getAllInterfacesForClass introspects parent- interfaces as well (SPR-7247)--
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.core/src/main/java/org/springframework/util/ClassUtils.java b/org.springframework.core/src/main/java/org/springframework/util/ClassUtils.java
index f771a5e921b1..3ce2b2b94d5f 100644
--- a/org.springframework.core/src/main/java/org/springframework/util/ClassUtils.java
+++ b/org.springframework.core/src/main/java/org/springframework/util/ClassUtils.java
@@ -22,7 +22,6 @@
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
-import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@@ -30,7 +29,6 @@
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
-import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -971,22 +969,8 @@ public static Class<?>[] getAllInterfacesForClass(Class<?> clazz) {
* @return all interfaces that the given object implements as array
*/
public static Class<?>[] getAllInterfacesForClass(Class<?> clazz, ClassLoader classLoader) {
- Assert.notNull(clazz, "Class must not be null");
- if (clazz.isInterface()) {
- return new Class[] {clazz};
- }
- List<Class<?>> interfaces = new ArrayList<Class<?>>();
- while (clazz != null) {
- Class<?>[] ifcs = clazz.getInterfaces();
- for (Class<?> ifc : ifcs) {
- if (!interfaces.contains(ifc) &&
- (classLoader == null || isVisible(ifc, classLoader))) {
- interfaces.add(ifc);
- }
- }
- clazz = clazz.getSuperclass();
- }
- return interfaces.toArray(new Class[interfaces.size()]);
+ Set<Class> ifcs = getAllInterfacesForClassAsSet(clazz, classLoader);
+ return ifcs.toArray(new Class[ifcs.size()]);
}
/**
@@ -1022,16 +1006,14 @@ public static Set<Class> getAllInterfacesForClassAsSet(Class clazz) {
*/
public static Set<Class> getAllInterfacesForClassAsSet(Class clazz, ClassLoader classLoader) {
Assert.notNull(clazz, "Class must not be null");
- if (clazz.isInterface()) {
+ if (clazz.isInterface() && isVisible(clazz, classLoader)) {
return Collections.singleton(clazz);
}
Set<Class> interfaces = new LinkedHashSet<Class>();
while (clazz != null) {
- for (int i = 0; i < clazz.getInterfaces().length; i++) {
- Class<?> ifc = clazz.getInterfaces()[i];
- if (classLoader == null || isVisible(ifc, classLoader)) {
- interfaces.add(ifc);
- }
+ Class<?>[] ifcs = clazz.getInterfaces();
+ for (Class<?> ifc : ifcs) {
+ interfaces.addAll(getAllInterfacesForClassAsSet(ifc, classLoader));
}
clazz = clazz.getSuperclass();
}
|
736169aa2a46f489cd8e75cf4d61cef997fc456f
|
spring-framework
|
revised WebApplicationContext lookup--
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/support/RequestContextUtils.java b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/support/RequestContextUtils.java
index fed2015a4a71..8859e88a7c37 100644
--- a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/support/RequestContextUtils.java
+++ b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/support/RequestContextUtils.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2007 the original author or authors.
+ * Copyright 2002-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,7 +17,6 @@
package org.springframework.web.servlet.support;
import java.util.Locale;
-
import javax.servlet.ServletContext;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
@@ -79,10 +78,7 @@ public static WebApplicationContext getWebApplicationContext(
if (servletContext == null) {
throw new IllegalStateException("No WebApplicationContext found: not in a DispatcherServlet request?");
}
- webApplicationContext = WebApplicationContextUtils.getWebApplicationContext(servletContext);
- if (webApplicationContext == null) {
- throw new IllegalStateException("No WebApplicationContext found: no ContextLoaderListener registered?");
- }
+ webApplicationContext = WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext);
}
return webApplicationContext;
}
diff --git a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/view/tiles2/AbstractSpringPreparerFactory.java b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/view/tiles2/AbstractSpringPreparerFactory.java
index 809468636854..96aec8fbd6dc 100644
--- a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/view/tiles2/AbstractSpringPreparerFactory.java
+++ b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/view/tiles2/AbstractSpringPreparerFactory.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2007 the original author or authors.
+ * Copyright 2002-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,16 +16,15 @@
package org.springframework.web.servlet.view.tiles2;
-import javax.servlet.ServletRequest;
-
import org.apache.tiles.TilesException;
import org.apache.tiles.context.TilesRequestContext;
import org.apache.tiles.preparer.PreparerFactory;
import org.apache.tiles.preparer.ViewPreparer;
-import org.apache.tiles.servlet.context.ServletTilesApplicationContext;
+import org.apache.tiles.servlet.context.ServletTilesRequestContext;
import org.springframework.web.context.WebApplicationContext;
-import org.springframework.web.servlet.support.RequestContextUtils;
+import org.springframework.web.context.support.WebApplicationContextUtils;
+import org.springframework.web.servlet.DispatcherServlet;
/**
* Abstract implementation of the Tiles2 {@link org.apache.tiles.preparer.PreparerFactory}
@@ -41,20 +40,24 @@
public abstract class AbstractSpringPreparerFactory implements PreparerFactory {
public ViewPreparer getPreparer(String name, TilesRequestContext context) throws TilesException {
- ServletRequest servletRequest = null;
- if (context.getRequest() instanceof ServletRequest) {
- servletRequest = (ServletRequest) context.getRequest();
- }
- ServletTilesApplicationContext tilesApplicationContext = null;
- if (context instanceof ServletTilesApplicationContext) {
- tilesApplicationContext = (ServletTilesApplicationContext) context;
- }
- if (servletRequest == null && tilesApplicationContext == null) {
- throw new IllegalStateException("SpringBeanPreparerFactory requires either a " +
- "ServletRequest or a ServletTilesApplicationContext to operate on");
+ WebApplicationContext webApplicationContext = (WebApplicationContext) context.getRequestScope().get(
+ DispatcherServlet.WEB_APPLICATION_CONTEXT_ATTRIBUTE);
+ if (webApplicationContext == null) {
+ /* as of Tiles 2.1:
+ webApplicationContext = (WebApplicationContext) context.getApplicationContext().getApplicationScope().get(
+ WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE);
+ if (webApplicationContext == null) {
+ throw new IllegalStateException("No WebApplicationContext found: no ContextLoaderListener registered?");
+ }
+ */
+ if (!(context instanceof ServletTilesRequestContext)) {
+ throw new IllegalStateException(
+ getClass().getSimpleName() + " requires a ServletTilesRequestContext to operate on");
+ }
+ ServletTilesRequestContext servletRequestContext = (ServletTilesRequestContext) context;
+ webApplicationContext = WebApplicationContextUtils.getRequiredWebApplicationContext(
+ servletRequestContext.getServletContext());
}
- WebApplicationContext webApplicationContext = RequestContextUtils.getWebApplicationContext(
- servletRequest, tilesApplicationContext.getServletContext());
return getPreparer(name, webApplicationContext);
}
diff --git a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/view/tiles2/SpringBeanPreparerFactory.java b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/view/tiles2/SpringBeanPreparerFactory.java
index 3216059f8e73..527177951c94 100644
--- a/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/view/tiles2/SpringBeanPreparerFactory.java
+++ b/org.springframework.web.servlet/src/main/java/org/springframework/web/servlet/view/tiles2/SpringBeanPreparerFactory.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2007 the original author or authors.
+ * Copyright 2002-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -36,7 +36,7 @@ public class SpringBeanPreparerFactory extends AbstractSpringPreparerFactory {
@Override
protected ViewPreparer getPreparer(String name, WebApplicationContext context) throws TilesException {
- return (ViewPreparer) context.getBean(name, ViewPreparer.class);
+ return context.getBean(name, ViewPreparer.class);
}
}
|
10bc2480001f2c399afa50add8f66b1e22144b14
|
elasticsearch
|
add anotehr test--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/index/cache/filter/FilterCacheTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/index/cache/filter/FilterCacheTests.java
index 244d8bb36fcd6..b059847498b57 100644
--- a/modules/elasticsearch/src/test/java/org/elasticsearch/index/cache/filter/FilterCacheTests.java
+++ b/modules/elasticsearch/src/test/java/org/elasticsearch/index/cache/filter/FilterCacheTests.java
@@ -23,7 +23,9 @@
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.index.Index;
@@ -73,12 +75,13 @@ private void verifyCache(FilterCache filterCache) throws Exception {
reader = refreshReader(reader);
IndexSearcher searcher = new IndexSearcher(reader);
assertThat(Lucene.count(searcher, new ConstantScoreQuery(filterCache.cache(new TermFilter(new Term("id", "1")))), -1), equalTo(1l));
+ assertThat(Lucene.count(searcher, new FilteredQuery(new MatchAllDocsQuery(), filterCache.cache(new TermFilter(new Term("id", "1")))), -1), equalTo(1l));
indexWriter.deleteDocuments(new Term("id", "1"));
reader = refreshReader(reader);
searcher = new IndexSearcher(reader);
assertThat(Lucene.count(searcher, new ConstantScoreQuery(filterCache.cache(new TermFilter(new Term("id", "1")))), -1), equalTo(0l));
-
+ assertThat(Lucene.count(searcher, new FilteredQuery(new MatchAllDocsQuery(), filterCache.cache(new TermFilter(new Term("id", "1")))), -1), equalTo(0l));
indexWriter.close();
}
|
d158a03f09a1df7df66d53a8eda24762f5707718
|
intellij-community
|
editorPaintStart useless in our case--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/lang-impl/src/com/intellij/execution/console/ConsoleGutterComponent.java b/platform/lang-impl/src/com/intellij/execution/console/ConsoleGutterComponent.java
index 3f88997bdad18..e9881c3b60073 100644
--- a/platform/lang-impl/src/com/intellij/execution/console/ConsoleGutterComponent.java
+++ b/platform/lang-impl/src/com/intellij/execution/console/ConsoleGutterComponent.java
@@ -3,8 +3,6 @@
import com.intellij.codeInsight.hint.TooltipController;
import com.intellij.codeInsight.hint.TooltipGroup;
import com.intellij.ide.ui.UISettings;
-import com.intellij.openapi.application.ApplicationManager;
-import com.intellij.openapi.application.impl.ApplicationImpl;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.VisualPosition;
import com.intellij.openapi.editor.colors.EditorFontType;
@@ -120,40 +118,34 @@ public Dimension getPreferredSize() {
@Override
public void paint(Graphics g) {
- ((ApplicationImpl)ApplicationManager.getApplication()).editorPaintStart();
- try {
- Rectangle clip = g.getClipBounds();
- if (clip.height <= 0 || maxContentWidth == 0) {
+ Rectangle clip = g.getClipBounds();
+ if (clip.height <= 0 || maxContentWidth == 0) {
+ return;
+ }
+
+ if (atLineStart) {
+ // don't paint in the overlapped region
+ if (clip.x >= maxContentWidth) {
return;
}
- if (atLineStart) {
- // don't paint in the overlapped region
- if (clip.x >= maxContentWidth) {
- return;
- }
-
- g.setColor(editor.getBackgroundColor());
- g.fillRect(clip.x, clip.y, Math.min(clip.width, maxContentWidth - clip.x), clip.height);
- }
+ g.setColor(editor.getBackgroundColor());
+ g.fillRect(clip.x, clip.y, Math.min(clip.width, maxContentWidth - clip.x), clip.height);
+ }
- UISettings.setupAntialiasing(g);
+ UISettings.setupAntialiasing(g);
- Graphics2D g2 = (Graphics2D)g;
- Object hint = g2.getRenderingHint(RenderingHints.KEY_ANTIALIASING);
- if (!UIUtil.isRetina()) {
- g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF);
- }
+ Graphics2D g2 = (Graphics2D)g;
+ Object hint = g2.getRenderingHint(RenderingHints.KEY_ANTIALIASING);
+ if (!UIUtil.isRetina()) {
+ g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF);
+ }
- try {
- paintAnnotations(g, clip);
- }
- finally {
- g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, hint);
- }
+ try {
+ paintAnnotations(g, clip);
}
finally {
- ((ApplicationImpl)ApplicationManager.getApplication()).editorPaintFinish();
+ g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, hint);
}
}
|
64bed0460e0bab9157e71192a18b2285bf1ef536
|
hadoop
|
YARN-1063. Augmented Hadoop common winutils to have- the ability to create containers as domain users. Contributed by Remus- Rusanu. Committed as a YARN patch even though all the code changes are in- common.--(cherry picked from commit 5ca97f1e60b8a7848f6eadd15f6c08ed390a8cda)-
|
a
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-common-project/hadoop-common/src/main/winutils/chown.c b/hadoop-common-project/hadoop-common/src/main/winutils/chown.c
index bc2aefc79eeb1..1be81216974a5 100644
--- a/hadoop-common-project/hadoop-common/src/main/winutils/chown.c
+++ b/hadoop-common-project/hadoop-common/src/main/winutils/chown.c
@@ -63,11 +63,11 @@ static DWORD ChangeFileOwnerBySid(__in LPCWSTR path,
// SID is not contained in the caller's token, and have the SE_GROUP_OWNER
// permission enabled.
//
- if (!EnablePrivilege(L"SeTakeOwnershipPrivilege"))
+ if (EnablePrivilege(L"SeTakeOwnershipPrivilege") != ERROR_SUCCESS)
{
fwprintf(stdout, L"INFO: The user does not have SeTakeOwnershipPrivilege.\n");
}
- if (!EnablePrivilege(L"SeRestorePrivilege"))
+ if (EnablePrivilege(L"SeRestorePrivilege") != ERROR_SUCCESS)
{
fwprintf(stdout, L"INFO: The user does not have SeRestorePrivilege.\n");
}
diff --git a/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h b/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
index 1c0007a6da922..bae754c9b6e25 100644
--- a/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
+++ b/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
@@ -27,6 +27,8 @@
#include <accctrl.h>
#include <strsafe.h>
#include <lm.h>
+#include <ntsecapi.h>
+#include <userenv.h>
enum EXIT_CODE
{
@@ -153,6 +155,26 @@ DWORD ChangeFileModeByMask(__in LPCWSTR path, INT mode);
DWORD GetLocalGroupsForUser(__in LPCWSTR user,
__out LPLOCALGROUP_USERS_INFO_0 *groups, __out LPDWORD entries);
-BOOL EnablePrivilege(__in LPCWSTR privilegeName);
-
void GetLibraryName(__in LPCVOID lpAddress, __out LPWSTR *filename);
+
+DWORD EnablePrivilege(__in LPCWSTR privilegeName);
+
+void AssignLsaString(__inout LSA_STRING * target, __in const char *strBuf);
+
+DWORD RegisterWithLsa(__in const char *logonProcessName, __out HANDLE * lsaHandle);
+
+void UnregisterWithLsa(__in HANDLE lsaHandle);
+
+DWORD LookupKerberosAuthenticationPackageId(__in HANDLE lsaHandle, __out ULONG * packageId);
+
+DWORD CreateLogonForUser(__in HANDLE lsaHandle,
+ __in const char * tokenSourceName,
+ __in const char * tokenOriginName,
+ __in ULONG authnPkgId,
+ __in const wchar_t* principalName,
+ __out HANDLE *tokenHandle);
+
+DWORD LoadUserProfileForLogon(__in HANDLE logonHandle, __out PROFILEINFO * pi);
+
+DWORD UnloadProfileForLogon(__in HANDLE logonHandle, __in PROFILEINFO * pi);
+
diff --git a/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c b/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
index 391247fccd47d..da16ff5b081c4 100644
--- a/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
+++ b/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
@@ -17,6 +17,8 @@
#pragma comment(lib, "authz.lib")
#pragma comment(lib, "netapi32.lib")
+#pragma comment(lib, "Secur32.lib")
+#pragma comment(lib, "Userenv.lib")
#include "winutils.h"
#include <authz.h>
#include <sddl.h>
@@ -797,7 +799,6 @@ DWORD FindFileOwnerAndPermission(
__out_opt PINT pMask)
{
DWORD dwRtnCode = 0;
-
PSECURITY_DESCRIPTOR pSd = NULL;
PSID psidOwner = NULL;
@@ -1638,11 +1639,12 @@ DWORD GetLocalGroupsForUser(
// to the process's access token.
//
// Returns:
-// TRUE: on success
+// ERROR_SUCCESS on success
+// GetLastError() on error
//
// Notes:
//
-BOOL EnablePrivilege(__in LPCWSTR privilegeName)
+DWORD EnablePrivilege(__in LPCWSTR privilegeName)
{
HANDLE hToken = INVALID_HANDLE_VALUE;
TOKEN_PRIVILEGES tp = { 0 };
@@ -1651,28 +1653,31 @@ BOOL EnablePrivilege(__in LPCWSTR privilegeName)
if (!OpenProcessToken(GetCurrentProcess(),
TOKEN_ADJUST_PRIVILEGES | TOKEN_QUERY, &hToken))
{
- ReportErrorCode(L"OpenProcessToken", GetLastError());
- return FALSE;
+ dwErrCode = GetLastError();
+ ReportErrorCode(L"OpenProcessToken", dwErrCode);
+ return dwErrCode;
}
tp.PrivilegeCount = 1;
if (!LookupPrivilegeValueW(NULL,
privilegeName, &(tp.Privileges[0].Luid)))
{
- ReportErrorCode(L"LookupPrivilegeValue", GetLastError());
+ dwErrCode = GetLastError();
+ ReportErrorCode(L"LookupPrivilegeValue", dwErrCode);
CloseHandle(hToken);
- return FALSE;
+ return dwErrCode;
}
tp.Privileges[0].Attributes = SE_PRIVILEGE_ENABLED;
// As stated on MSDN, we need to use GetLastError() to check if
// AdjustTokenPrivileges() adjusted all of the specified privileges.
//
- AdjustTokenPrivileges(hToken, FALSE, &tp, 0, NULL, NULL);
+ if( !AdjustTokenPrivileges(hToken, FALSE, &tp, 0, NULL, NULL) ) {
dwErrCode = GetLastError();
+ }
CloseHandle(hToken);
- return dwErrCode == ERROR_SUCCESS;
+ return dwErrCode;
}
//----------------------------------------------------------------------------
@@ -1716,9 +1721,6 @@ void ReportErrorCode(LPCWSTR func, DWORD err)
// Description:
// Given an address, get the file name of the library from which it was loaded.
//
-// Returns:
-// None
-//
// Notes:
// - The function allocates heap memory and points the filename out parameter to
// the newly allocated memory, which will contain the name of the file.
@@ -1757,3 +1759,290 @@ void GetLibraryName(LPCVOID lpAddress, LPWSTR *filename)
*filename = NULL;
}
}
+
+// Function: AssignLsaString
+//
+// Description:
+// fills in values of LSA_STRING struct to point to a string buffer
+//
+// Returns:
+// None
+//
+// IMPORTANT*** strBuf is not copied. It must be globally immutable
+//
+void AssignLsaString(__inout LSA_STRING * target, __in const char *strBuf)
+{
+ target->Length = (USHORT)(sizeof(char)*strlen(strBuf));
+ target->MaximumLength = target->Length;
+ target->Buffer = (char *)(strBuf);
+}
+
+//----------------------------------------------------------------------------
+// Function: RegisterWithLsa
+//
+// Description:
+// Registers with local security authority and sets handle for use in later LSA
+// operations
+//
+// Returns:
+// ERROR_SUCCESS on success
+// Other error code on failure
+//
+// Notes:
+//
+DWORD RegisterWithLsa(__in const char *logonProcessName, __out HANDLE * lsaHandle)
+{
+ LSA_STRING processName;
+ LSA_OPERATIONAL_MODE o_mode; // never useful as per msdn docs
+ NTSTATUS registerStatus;
+ *lsaHandle = 0;
+
+ AssignLsaString(&processName, logonProcessName);
+ registerStatus = LsaRegisterLogonProcess(&processName, lsaHandle, &o_mode);
+
+ return LsaNtStatusToWinError( registerStatus );
+}
+
+//----------------------------------------------------------------------------
+// Function: UnregisterWithLsa
+//
+// Description:
+// Closes LSA handle allocated by RegisterWithLsa()
+//
+// Returns:
+// None
+//
+// Notes:
+//
+void UnregisterWithLsa(__in HANDLE lsaHandle)
+{
+ LsaClose(lsaHandle);
+}
+
+//----------------------------------------------------------------------------
+// Function: LookupKerberosAuthenticationPackageId
+//
+// Description:
+// Looks of the current id (integer index) of the Kerberos authentication package on the local
+// machine.
+//
+// Returns:
+// ERROR_SUCCESS on success
+// Other error code on failure
+//
+// Notes:
+//
+DWORD LookupKerberosAuthenticationPackageId(__in HANDLE lsaHandle, __out ULONG * packageId)
+{
+ NTSTATUS lookupStatus;
+ LSA_STRING pkgName;
+
+ AssignLsaString(&pkgName, MICROSOFT_KERBEROS_NAME_A);
+ lookupStatus = LsaLookupAuthenticationPackage(lsaHandle, &pkgName, packageId);
+ return LsaNtStatusToWinError( lookupStatus );
+}
+
+//----------------------------------------------------------------------------
+// Function: CreateLogonForUser
+//
+// Description:
+// Contacts the local LSA and performs a logon without credential for the
+// given principal. This logon token will be local machine only and have no
+// network credentials attached.
+//
+// Returns:
+// ERROR_SUCCESS on success
+// Other error code on failure
+//
+// Notes:
+// This call assumes that all required privileges have already been enabled (TCB etc).
+// IMPORTANT **** tokenOriginName must be immutable!
+//
+DWORD CreateLogonForUser(__in HANDLE lsaHandle,
+ __in const char * tokenSourceName,
+ __in const char * tokenOriginName, // must be immutable, will not be copied!
+ __in ULONG authnPkgId,
+ __in const wchar_t* principalName,
+ __out HANDLE *tokenHandle)
+{
+ DWORD logonStatus = ERROR_ASSERTION_FAILURE; // Failure to set status should trigger error
+ TOKEN_SOURCE tokenSource;
+ LSA_STRING originName;
+ void * profile = NULL;
+
+ // from MSDN:
+ // The ClientUpn and ClientRealm members of the KERB_S4U_LOGON
+ // structure must point to buffers in memory that are contiguous
+ // to the structure itself. The value of the
+ // AuthenticationInformationLength parameter must take into
+ // account the length of these buffers.
+ const int principalNameBufLen = lstrlen(principalName)*sizeof(*principalName);
+ const int totalAuthInfoLen = sizeof(KERB_S4U_LOGON) + principalNameBufLen;
+ KERB_S4U_LOGON* s4uLogonAuthInfo = (KERB_S4U_LOGON*)calloc(totalAuthInfoLen, 1);
+ if (s4uLogonAuthInfo == NULL ) {
+ logonStatus = ERROR_NOT_ENOUGH_MEMORY;
+ goto done;
+ }
+ s4uLogonAuthInfo->MessageType = KerbS4ULogon;
+ s4uLogonAuthInfo->ClientUpn.Buffer = (wchar_t*)((char*)s4uLogonAuthInfo + sizeof *s4uLogonAuthInfo);
+ CopyMemory(s4uLogonAuthInfo->ClientUpn.Buffer, principalName, principalNameBufLen);
+ s4uLogonAuthInfo->ClientUpn.Length = (USHORT)principalNameBufLen;
+ s4uLogonAuthInfo->ClientUpn.MaximumLength = (USHORT)principalNameBufLen;
+
+ AllocateLocallyUniqueId(&tokenSource.SourceIdentifier);
+ StringCchCopyA(tokenSource.SourceName, TOKEN_SOURCE_LENGTH, tokenSourceName );
+ AssignLsaString(&originName, tokenOriginName);
+
+ {
+ DWORD cbProfile = 0;
+ LUID logonId;
+ QUOTA_LIMITS quotaLimits;
+ NTSTATUS subStatus;
+
+ NTSTATUS logonNtStatus = LsaLogonUser(lsaHandle,
+ &originName,
+ Batch, // SECURITY_LOGON_TYPE
+ authnPkgId,
+ s4uLogonAuthInfo,
+ totalAuthInfoLen,
+ 0,
+ &tokenSource,
+ &profile,
+ &cbProfile,
+ &logonId,
+ tokenHandle,
+ "aLimits,
+ &subStatus);
+ logonStatus = LsaNtStatusToWinError( logonNtStatus );
+ }
+done:
+ // clean up
+ if (s4uLogonAuthInfo != NULL) {
+ free(s4uLogonAuthInfo);
+ }
+ if (profile != NULL) {
+ LsaFreeReturnBuffer(profile);
+ }
+ return logonStatus;
+}
+
+// NOTE: must free allocatedName
+DWORD GetNameFromLogonToken(__in HANDLE logonToken, __out wchar_t **allocatedName)
+{
+ DWORD userInfoSize = 0;
+ PTOKEN_USER user = NULL;
+ DWORD userNameSize = 0;
+ wchar_t * userName = NULL;
+ DWORD domainNameSize = 0;
+ wchar_t * domainName = NULL;
+ SID_NAME_USE sidUse = SidTypeUnknown;
+ DWORD getNameStatus = ERROR_ASSERTION_FAILURE; // Failure to set status should trigger error
+ BOOL tokenInformation = FALSE;
+
+ // call for sid size then alloc and call for sid
+ tokenInformation = GetTokenInformation(logonToken, TokenUser, NULL, 0, &userInfoSize);
+ assert (FALSE == tokenInformation);
+
+ // last call should have failed and filled in allocation size
+ if ((getNameStatus = GetLastError()) != ERROR_INSUFFICIENT_BUFFER)
+ {
+ goto done;
+ }
+ user = (PTOKEN_USER)calloc(userInfoSize,1);
+ if (user == NULL)
+ {
+ getNameStatus = ERROR_NOT_ENOUGH_MEMORY;
+ goto done;
+ }
+ if (!GetTokenInformation(logonToken, TokenUser, user, userInfoSize, &userInfoSize)) {
+ getNameStatus = GetLastError();
+ goto done;
+ }
+ LookupAccountSid( NULL, user->User.Sid, NULL, &userNameSize, NULL, &domainNameSize, &sidUse );
+ // last call should have failed and filled in allocation size
+ if ((getNameStatus = GetLastError()) != ERROR_INSUFFICIENT_BUFFER)
+ {
+ goto done;
+ }
+ userName = (wchar_t *)calloc(userNameSize, sizeof(wchar_t));
+ if (userName == NULL) {
+ getNameStatus = ERROR_NOT_ENOUGH_MEMORY;
+ goto done;
+ }
+ domainName = (wchar_t *)calloc(domainNameSize, sizeof(wchar_t));
+ if (domainName == NULL) {
+ getNameStatus = ERROR_NOT_ENOUGH_MEMORY;
+ goto done;
+ }
+ if (!LookupAccountSid( NULL, user->User.Sid, userName, &userNameSize, domainName, &domainNameSize, &sidUse )) {
+ getNameStatus = GetLastError();
+ goto done;
+ }
+
+ getNameStatus = ERROR_SUCCESS;
+ *allocatedName = userName;
+ userName = NULL;
+done:
+ if (user != NULL) {
+ free( user );
+ user = NULL;
+ }
+ if (userName != NULL) {
+ free( userName );
+ userName = NULL;
+ }
+ if (domainName != NULL) {
+ free( domainName );
+ domainName = NULL;
+ }
+ return getNameStatus;
+}
+
+DWORD LoadUserProfileForLogon(__in HANDLE logonHandle, __out PROFILEINFO * pi)
+{
+ wchar_t *userName = NULL;
+ DWORD loadProfileStatus = ERROR_ASSERTION_FAILURE; // Failure to set status should trigger error
+
+ loadProfileStatus = GetNameFromLogonToken( logonHandle, &userName );
+ if (loadProfileStatus != ERROR_SUCCESS) {
+ goto done;
+ }
+
+ assert(pi);
+
+ ZeroMemory( pi, sizeof(*pi) );
+ pi->dwSize = sizeof(*pi);
+ pi->lpUserName = userName;
+ pi->dwFlags = PI_NOUI;
+
+ // if the profile does not exist it will be created
+ if ( !LoadUserProfile( logonHandle, pi ) ) {
+ loadProfileStatus = GetLastError();
+ goto done;
+ }
+
+ loadProfileStatus = ERROR_SUCCESS;
+done:
+ return loadProfileStatus;
+}
+
+DWORD UnloadProfileForLogon(__in HANDLE logonHandle, __in PROFILEINFO * pi)
+{
+ DWORD touchProfileStatus = ERROR_ASSERTION_FAILURE; // Failure to set status should trigger error
+
+ assert(pi);
+
+ if ( !UnloadUserProfile(logonHandle, pi->hProfile ) ) {
+ touchProfileStatus = GetLastError();
+ goto done;
+ }
+ if (pi->lpUserName != NULL) {
+ free(pi->lpUserName);
+ pi->lpUserName = NULL;
+ }
+ ZeroMemory( pi, sizeof(*pi) );
+
+ touchProfileStatus = ERROR_SUCCESS;
+done:
+ return touchProfileStatus;
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/winutils/symlink.c b/hadoop-common-project/hadoop-common/src/main/winutils/symlink.c
index ea372cc06dc53..02acd4d2a40e8 100644
--- a/hadoop-common-project/hadoop-common/src/main/winutils/symlink.c
+++ b/hadoop-common-project/hadoop-common/src/main/winutils/symlink.c
@@ -77,7 +77,7 @@ int Symlink(__in int argc, __in_ecount(argc) wchar_t *argv[])
// This is just an additional step to do the privilege check by not using
// error code from CreateSymbolicLink() method.
//
- if (!EnablePrivilege(L"SeCreateSymbolicLinkPrivilege"))
+ if (EnablePrivilege(L"SeCreateSymbolicLinkPrivilege") != ERROR_SUCCESS)
{
fwprintf(stderr,
L"No privilege to create symbolic links.\n");
diff --git a/hadoop-common-project/hadoop-common/src/main/winutils/task.c b/hadoop-common-project/hadoop-common/src/main/winutils/task.c
index 19bda96a1e6ed..783f162322bd7 100644
--- a/hadoop-common-project/hadoop-common/src/main/winutils/task.c
+++ b/hadoop-common-project/hadoop-common/src/main/winutils/task.c
@@ -18,6 +18,7 @@
#include "winutils.h"
#include <errno.h>
#include <psapi.h>
+#include <malloc.h>
#define PSAPI_VERSION 1
#pragma comment(lib, "psapi.lib")
@@ -28,12 +29,18 @@
// process exits with 128 + signal. For SIGKILL, this would be 128 + 9 = 137.
#define KILLED_PROCESS_EXIT_CODE 137
+// Name for tracking this logon process when registering with LSA
+static const char *LOGON_PROCESS_NAME="Hadoop Container Executor";
+// Name for token source, must be less or eq to TOKEN_SOURCE_LENGTH (currently 8) chars
+static const char *TOKEN_SOURCE_NAME = "HadoopEx";
+
// List of different task related command line options supported by
// winutils.
typedef enum TaskCommandOptionType
{
TaskInvalid,
TaskCreate,
+ TaskCreateAsUser,
TaskIsAlive,
TaskKill,
TaskProcessList
@@ -86,37 +93,53 @@ static BOOL ParseCommandLine(__in int argc,
}
}
+ if (argc >= 6) {
+ if (wcscmp(argv[1], L"createAsUser") == 0)
+ {
+ *command = TaskCreateAsUser;
+ return TRUE;
+ }
+ }
+
return FALSE;
}
//----------------------------------------------------------------------------
-// Function: createTask
+// Function: CreateTaskImpl
//
// Description:
// Creates a task via a jobobject. Outputs the
// appropriate information to stdout on success, or stderr on failure.
+// logonHandle may be NULL, in this case the current logon will be utilized for the
+// created process
//
// Returns:
// ERROR_SUCCESS: On success
// GetLastError: otherwise
-DWORD createTask(__in PCWSTR jobObjName,__in PWSTR cmdLine)
+DWORD CreateTaskImpl(__in_opt HANDLE logonHandle, __in PCWSTR jobObjName,__in PWSTR cmdLine)
{
- DWORD err = ERROR_SUCCESS;
+ DWORD dwErrorCode = ERROR_SUCCESS;
DWORD exitCode = EXIT_FAILURE;
+ DWORD currDirCnt = 0;
STARTUPINFO si;
PROCESS_INFORMATION pi;
HANDLE jobObject = NULL;
JOBOBJECT_EXTENDED_LIMIT_INFORMATION jeli = { 0 };
+ void * envBlock = NULL;
+ BOOL createProcessResult = FALSE;
+
+ wchar_t* curr_dir = NULL;
+ FILE *stream = NULL;
// Create un-inheritable job object handle and set job object to terminate
// when last handle is closed. So winutils.exe invocation has the only open
// job object handle. Exit of winutils.exe ensures termination of job object.
// Either a clean exit of winutils or crash or external termination.
jobObject = CreateJobObject(NULL, jobObjName);
- err = GetLastError();
- if(jobObject == NULL || err == ERROR_ALREADY_EXISTS)
+ dwErrorCode = GetLastError();
+ if(jobObject == NULL || dwErrorCode == ERROR_ALREADY_EXISTS)
{
- return err;
+ return dwErrorCode;
}
jeli.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
if(SetInformationJobObject(jobObject,
@@ -124,36 +147,102 @@ DWORD createTask(__in PCWSTR jobObjName,__in PWSTR cmdLine)
&jeli,
sizeof(jeli)) == 0)
{
- err = GetLastError();
+ dwErrorCode = GetLastError();
CloseHandle(jobObject);
- return err;
+ return dwErrorCode;
}
if(AssignProcessToJobObject(jobObject, GetCurrentProcess()) == 0)
{
- err = GetLastError();
+ dwErrorCode = GetLastError();
CloseHandle(jobObject);
- return err;
+ return dwErrorCode;
}
// the child JVM uses this env var to send the task OS process identifier
// to the TaskTracker. We pass the job object name.
if(SetEnvironmentVariable(L"JVM_PID", jobObjName) == 0)
{
- err = GetLastError();
- CloseHandle(jobObject);
- return err;
+ dwErrorCode = GetLastError();
+ // We have to explictly Terminate, passing in the error code
+ // simply closing the job would kill our own process with success exit status
+ TerminateJobObject(jobObject, dwErrorCode);
+ return dwErrorCode;
}
ZeroMemory( &si, sizeof(si) );
si.cb = sizeof(si);
ZeroMemory( &pi, sizeof(pi) );
- if (CreateProcess(NULL, cmdLine, NULL, NULL, TRUE, 0, NULL, NULL, &si, &pi) == 0)
- {
- err = GetLastError();
- CloseHandle(jobObject);
- return err;
+ if( logonHandle != NULL ) {
+ // create user environment for this logon
+ if(!CreateEnvironmentBlock(&envBlock,
+ logonHandle,
+ TRUE )) {
+ dwErrorCode = GetLastError();
+ // We have to explictly Terminate, passing in the error code
+ // simply closing the job would kill our own process with success exit status
+ TerminateJobObject(jobObject, dwErrorCode);
+ return dwErrorCode;
+ }
+ }
+
+ // Get the required buffer size first
+ currDirCnt = GetCurrentDirectory(0, NULL);
+ if (0 < currDirCnt) {
+ curr_dir = (wchar_t*) alloca(currDirCnt * sizeof(wchar_t));
+ assert(curr_dir);
+ currDirCnt = GetCurrentDirectory(currDirCnt, curr_dir);
+ }
+
+ if (0 == currDirCnt) {
+ dwErrorCode = GetLastError();
+ // We have to explictly Terminate, passing in the error code
+ // simply closing the job would kill our own process with success exit status
+ TerminateJobObject(jobObject, dwErrorCode);
+ return dwErrorCode;
+ }
+
+ if (logonHandle == NULL) {
+ createProcessResult = CreateProcess(
+ NULL, // ApplicationName
+ cmdLine, // command line
+ NULL, // process security attributes
+ NULL, // thread security attributes
+ TRUE, // inherit handles
+ 0, // creation flags
+ NULL, // environment
+ curr_dir, // current directory
+ &si, // startup info
+ &pi); // process info
+ }
+ else {
+ createProcessResult = CreateProcessAsUser(
+ logonHandle, // logon token handle
+ NULL, // Application handle
+ cmdLine, // command line
+ NULL, // process security attributes
+ NULL, // thread security attributes
+ FALSE, // inherit handles
+ CREATE_UNICODE_ENVIRONMENT, // creation flags
+ envBlock, // environment
+ curr_dir, // current directory
+ &si, // startup info
+ &pi); // process info
+ }
+
+ if (FALSE == createProcessResult) {
+ dwErrorCode = GetLastError();
+ if( envBlock != NULL ) {
+ DestroyEnvironmentBlock( envBlock );
+ envBlock = NULL;
+ }
+ // We have to explictly Terminate, passing in the error code
+ // simply closing the job would kill our own process with success exit status
+ TerminateJobObject(jobObject, dwErrorCode);
+
+ // This is tehnically dead code, we cannot reach this condition
+ return dwErrorCode;
}
CloseHandle(pi.hThread);
@@ -162,10 +251,15 @@ DWORD createTask(__in PCWSTR jobObjName,__in PWSTR cmdLine)
WaitForSingleObject( pi.hProcess, INFINITE );
if(GetExitCodeProcess(pi.hProcess, &exitCode) == 0)
{
- err = GetLastError();
+ dwErrorCode = GetLastError();
}
CloseHandle( pi.hProcess );
+ if( envBlock != NULL ) {
+ DestroyEnvironmentBlock( envBlock );
+ envBlock = NULL;
+ }
+
// Terminate job object so that all spawned processes are also killed.
// This is needed because once this process closes the handle to the job
// object and none of the spawned objects have the handle open (via
@@ -173,21 +267,134 @@ DWORD createTask(__in PCWSTR jobObjName,__in PWSTR cmdLine)
// program (say winutils task kill) to terminate this job object via its name.
if(TerminateJobObject(jobObject, exitCode) == 0)
{
- err = GetLastError();
+ dwErrorCode = GetLastError();
}
- // comes here only on failure or TerminateJobObject
+ // comes here only on failure of TerminateJobObject
CloseHandle(jobObject);
- if(err != ERROR_SUCCESS)
+ if(dwErrorCode != ERROR_SUCCESS)
{
- return err;
+ return dwErrorCode;
}
return exitCode;
}
//----------------------------------------------------------------------------
-// Function: isTaskAlive
+// Function: CreateTask
+//
+// Description:
+// Creates a task via a jobobject. Outputs the
+// appropriate information to stdout on success, or stderr on failure.
+//
+// Returns:
+// ERROR_SUCCESS: On success
+// GetLastError: otherwise
+DWORD CreateTask(__in PCWSTR jobObjName,__in PWSTR cmdLine)
+{
+ // call with null logon in order to create tasks utilizing the current logon
+ return CreateTaskImpl( NULL, jobObjName, cmdLine );
+}
+//----------------------------------------------------------------------------
+// Function: CreateTask
+//
+// Description:
+// Creates a task via a jobobject. Outputs the
+// appropriate information to stdout on success, or stderr on failure.
+//
+// Returns:
+// ERROR_SUCCESS: On success
+// GetLastError: otherwise
+DWORD CreateTaskAsUser(__in PCWSTR jobObjName,__in PWSTR user, __in PWSTR pidFilePath, __in PWSTR cmdLine)
+{
+ DWORD err = ERROR_SUCCESS;
+ DWORD exitCode = EXIT_FAILURE;
+ ULONG authnPkgId;
+ HANDLE lsaHandle = INVALID_HANDLE_VALUE;
+ PROFILEINFO pi;
+ BOOL profileIsLoaded = FALSE;
+ FILE* pidFile = NULL;
+
+ DWORD retLen = 0;
+ HANDLE logonHandle = NULL;
+
+ err = EnablePrivilege(SE_TCB_NAME);
+ if( err != ERROR_SUCCESS ) {
+ fwprintf(stdout, L"INFO: The user does not have SE_TCB_NAME.\n");
+ goto done;
+ }
+ err = EnablePrivilege(SE_ASSIGNPRIMARYTOKEN_NAME);
+ if( err != ERROR_SUCCESS ) {
+ fwprintf(stdout, L"INFO: The user does not have SE_ASSIGNPRIMARYTOKEN_NAME.\n");
+ goto done;
+ }
+ err = EnablePrivilege(SE_INCREASE_QUOTA_NAME);
+ if( err != ERROR_SUCCESS ) {
+ fwprintf(stdout, L"INFO: The user does not have SE_INCREASE_QUOTA_NAME.\n");
+ goto done;
+ }
+ err = EnablePrivilege(SE_RESTORE_NAME);
+ if( err != ERROR_SUCCESS ) {
+ fwprintf(stdout, L"INFO: The user does not have SE_RESTORE_NAME.\n");
+ goto done;
+ }
+
+ err = RegisterWithLsa(LOGON_PROCESS_NAME ,&lsaHandle);
+ if( err != ERROR_SUCCESS ) goto done;
+
+ err = LookupKerberosAuthenticationPackageId( lsaHandle, &authnPkgId );
+ if( err != ERROR_SUCCESS ) goto done;
+
+ err = CreateLogonForUser(lsaHandle,
+ LOGON_PROCESS_NAME,
+ TOKEN_SOURCE_NAME,
+ authnPkgId,
+ user,
+ &logonHandle);
+ if( err != ERROR_SUCCESS ) goto done;
+
+ err = LoadUserProfileForLogon(logonHandle, &pi);
+ if( err != ERROR_SUCCESS ) goto done;
+ profileIsLoaded = TRUE;
+
+ // Create the PID file
+
+ if (!(pidFile = _wfopen(pidFilePath, "w"))) {
+ err = GetLastError();
+ goto done;
+ }
+
+ if (0 > fprintf_s(pidFile, "%ls", jobObjName)) {
+ err = GetLastError();
+ }
+
+ fclose(pidFile);
+
+ if (err != ERROR_SUCCESS) {
+ goto done;
+ }
+
+ err = CreateTaskImpl(logonHandle, jobObjName, cmdLine);
+
+done:
+ if( profileIsLoaded ) {
+ UnloadProfileForLogon( logonHandle, &pi );
+ profileIsLoaded = FALSE;
+ }
+ if( logonHandle != NULL ) {
+ CloseHandle(logonHandle);
+ }
+
+ if (INVALID_HANDLE_VALUE != lsaHandle) {
+ UnregisterWithLsa(lsaHandle);
+ }
+
+ return err;
+}
+
+
+//----------------------------------------------------------------------------
+// Function: IsTaskAlive
//
// Description:
// Checks if a task is alive via a jobobject. Outputs the
@@ -196,7 +403,7 @@ DWORD createTask(__in PCWSTR jobObjName,__in PWSTR cmdLine)
// Returns:
// ERROR_SUCCESS: On success
// GetLastError: otherwise
-DWORD isTaskAlive(const WCHAR* jobObjName, int* isAlive, int* procsInJob)
+DWORD IsTaskAlive(const WCHAR* jobObjName, int* isAlive, int* procsInJob)
{
PJOBOBJECT_BASIC_PROCESS_ID_LIST procList;
HANDLE jobObject = NULL;
@@ -247,7 +454,7 @@ DWORD isTaskAlive(const WCHAR* jobObjName, int* isAlive, int* procsInJob)
}
//----------------------------------------------------------------------------
-// Function: killTask
+// Function: KillTask
//
// Description:
// Kills a task via a jobobject. Outputs the
@@ -256,7 +463,7 @@ DWORD isTaskAlive(const WCHAR* jobObjName, int* isAlive, int* procsInJob)
// Returns:
// ERROR_SUCCESS: On success
// GetLastError: otherwise
-DWORD killTask(PCWSTR jobObjName)
+DWORD KillTask(PCWSTR jobObjName)
{
HANDLE jobObject = OpenJobObject(JOB_OBJECT_TERMINATE, FALSE, jobObjName);
if(jobObject == NULL)
@@ -280,7 +487,7 @@ DWORD killTask(PCWSTR jobObjName)
}
//----------------------------------------------------------------------------
-// Function: printTaskProcessList
+// Function: PrintTaskProcessList
//
// Description:
// Prints resource usage of all processes in the task jobobject
@@ -288,7 +495,7 @@ DWORD killTask(PCWSTR jobObjName)
// Returns:
// ERROR_SUCCESS: On success
// GetLastError: otherwise
-DWORD printTaskProcessList(const WCHAR* jobObjName)
+DWORD PrintTaskProcessList(const WCHAR* jobObjName)
{
DWORD i;
PJOBOBJECT_BASIC_PROCESS_ID_LIST procList;
@@ -372,6 +579,21 @@ int Task(__in int argc, __in_ecount(argc) wchar_t *argv[])
{
DWORD dwErrorCode = ERROR_SUCCESS;
TaskCommandOption command = TaskInvalid;
+ wchar_t* cmdLine = NULL;
+ wchar_t buffer[16*1024] = L""; // 32K max command line
+ size_t charCountBufferLeft = sizeof
(buffer)/sizeof(wchar_t);
+ int crtArgIndex = 0;
+ size_t argLen = 0;
+ size_t wscatErr = 0;
+ wchar_t* insertHere = NULL;
+
+ enum {
+ ARGC_JOBOBJECTNAME = 2,
+ ARGC_USERNAME,
+ ARGC_PIDFILE,
+ ARGC_COMMAND,
+ ARGC_COMMAND_ARGS
+ };
if (!ParseCommandLine(argc, argv, &command)) {
dwErrorCode = ERROR_INVALID_COMMAND_LINE;
@@ -385,10 +607,57 @@ int Task(__in int argc, __in_ecount(argc) wchar_t *argv[])
{
// Create the task jobobject
//
- dwErrorCode = createTask(argv[2], argv[3]);
+ dwErrorCode = CreateTask(argv[2], argv[3]);
+ if (dwErrorCode != ERROR_SUCCESS)
+ {
+ ReportErrorCode(L"CreateTask", dwErrorCode);
+ goto TaskExit;
+ }
+ } else if (command == TaskCreateAsUser)
+ {
+ // Create the task jobobject as a domain user
+ // createAsUser accepts an open list of arguments. All arguments after the command are
+ // to be passed as argumrnts to the command itself.Here we're concatenating all
+ // arguments after the command into a single arg entry.
+ //
+ cmdLine = argv[ARGC_COMMAND];
+ if (argc > ARGC_COMMAND_ARGS) {
+ crtArgIndex = ARGC_COMMAND;
+ insertHere = buffer;
+ while (crtArgIndex < argc) {
+ argLen = wcslen(argv[crtArgIndex]);
+ wscatErr = wcscat_s(insertHere, charCountBufferLeft, argv[crtArgIndex]);
+ switch (wscatErr) {
+ case 0:
+ // 0 means success;
+ break;
+ case EINVAL:
+ dwErrorCode = ERROR_INVALID_PARAMETER;
+ goto TaskExit;
+ case ERANGE:
+ dwErrorCode = ERROR_INSUFFICIENT_BUFFER;
+ goto TaskExit;
+ default:
+ // This case is not MSDN documented.
+ dwErrorCode = ERROR_GEN_FAILURE;
+ goto TaskExit;
+ }
+ insertHere += argLen;
+ charCountBufferLeft -= argLen;
+ insertHere[0] = L' ';
+ insertHere += 1;
+ charCountBufferLeft -= 1;
+ insertHere[0] = 0;
+ ++crtArgIndex;
+ }
+ cmdLine = buffer;
+ }
+
+ dwErrorCode = CreateTaskAsUser(
+ argv[ARGC_JOBOBJECTNAME], argv[ARGC_USERNAME], argv[ARGC_PIDFILE], cmdLine);
if (dwErrorCode != ERROR_SUCCESS)
{
- ReportErrorCode(L"createTask", dwErrorCode);
+ ReportErrorCode(L"CreateTaskAsUser", dwErrorCode);
goto TaskExit;
}
} else if (command == TaskIsAlive)
@@ -397,10 +666,10 @@ int Task(__in int argc, __in_ecount(argc) wchar_t *argv[])
//
int isAlive;
int numProcs;
- dwErrorCode = isTaskAlive(argv[2], &isAlive, &numProcs);
+ dwErrorCode = IsTaskAlive(argv[2], &isAlive, &numProcs);
if (dwErrorCode != ERROR_SUCCESS)
{
- ReportErrorCode(L"isTaskAlive", dwErrorCode);
+ ReportErrorCode(L"IsTaskAlive", dwErrorCode);
goto TaskExit;
}
@@ -412,27 +681,27 @@ int Task(__in int argc, __in_ecount(argc) wchar_t *argv[])
else
{
dwErrorCode = ERROR_TASK_NOT_ALIVE;
- ReportErrorCode(L"isTaskAlive returned false", dwErrorCode);
+ ReportErrorCode(L"IsTaskAlive returned false", dwErrorCode);
goto TaskExit;
}
} else if (command == TaskKill)
{
// Check if task jobobject
//
- dwErrorCode = killTask(argv[2]);
+ dwErrorCode = KillTask(argv[2]);
if (dwErrorCode != ERROR_SUCCESS)
{
- ReportErrorCode(L"killTask", dwErrorCode);
+ ReportErrorCode(L"KillTask", dwErrorCode);
goto TaskExit;
}
} else if (command == TaskProcessList)
{
// Check if task jobobject
//
- dwErrorCode = printTaskProcessList(argv[2]);
+ dwErrorCode = PrintTaskProcessList(argv[2]);
if (dwErrorCode != ERROR_SUCCESS)
{
- ReportErrorCode(L"printTaskProcessList", dwErrorCode);
+ ReportErrorCode(L"PrintTaskProcessList", dwErrorCode);
goto TaskExit;
}
} else
@@ -453,10 +722,12 @@ void TaskUsage()
// ProcessTree.isSetsidSupported()
fwprintf(stdout, L"\
Usage: task create [TASKNAME] [COMMAND_LINE] |\n\
+ task createAsUser [TASKNAME] [USERNAME] [PIDFILE] [COMMAND_LINE] |\n\
task isAlive [TASKNAME] |\n\
task kill [TASKNAME]\n\
task processList [TASKNAME]\n\
Creates a new task jobobject with taskname\n\
+ Creates a new task jobobject with taskname as the user provided\n\
Checks if task jobobject is alive\n\
Kills task jobobject\n\
Prints to stdout a list of processes in the task\n\
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
index 588b21761ca81..953039d937a07 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
@@ -20,10 +20,12 @@
import static org.junit.Assert.*;
import static org.junit.Assume.assumeTrue;
+import static org.junit.matchers.JUnitMatchers.containsString;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
+import java.io.FileWriter;
import java.io.IOException;
import org.apache.commons.io.FileUtils;
@@ -33,7 +35,7 @@
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
-import static org.junit.Assume.*;
+
import static org.hamcrest.CoreMatchers.*;
/**
@@ -521,4 +523,26 @@ public void testReadLink() throws IOException {
assertThat(ece.getExitCode(), is(1));
}
}
+
+ @SuppressWarnings("deprecation")
+ @Test(timeout=10000)
+ public void testTaskCreate() throws IOException {
+ File batch = new File(TEST_DIR, "testTaskCreate.cmd");
+ File proof = new File(TEST_DIR, "testTaskCreate.out");
+ FileWriter fw = new FileWriter(batch);
+ String testNumber = String.format("%f", Math.random());
+ fw.write(String.format("echo %s > \"%s\"", testNumber, proof.getAbsolutePath()));
+ fw.close();
+
+ assertFalse(proof.exists());
+
+ Shell.execCommand(Shell.WINUTILS, "task", "create", "testTaskCreate" + testNumber,
+ batch.getAbsolutePath());
+
+ assertTrue(proof.exists());
+
+ String outNumber = FileUtils.readFileToString(proof);
+
+ assertThat(outNumber, containsString(testNumber));
+ }
}
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 47a7e2cb45b17..e8db12ddc5e57 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -88,6 +88,9 @@ Release 2.6.0 - UNRELEASED
YARN-2581. Passed LogAggregationContext to NM via ContainerTokenIdentifier.
(Xuan Gong via zjshen)
+ YARN-1063. Augmented Hadoop common winutils to have the ability to create
+ containers as domain users. (Remus Rusanu via vinodkv)
+
IMPROVEMENTS
YARN-2242. Improve exception information on AM launch crashes. (Li Lu
|
02c28943a09f9706049a5dd1dee220c7d2acd353
|
kotlin
|
Minor refactor--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaPropertyResolver.java b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaPropertyResolver.java
index b185a9e3a090d..31dbd67b9a674 100644
--- a/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaPropertyResolver.java
+++ b/compiler/frontend.java/src/org/jetbrains/jet/lang/resolve/java/resolver/JavaPropertyResolver.java
@@ -86,18 +86,13 @@ public Set<VariableDescriptor> resolveFieldGroupByName(
@NotNull ResolverScopeData scopeData,
@NotNull ClassOrNamespaceDescriptor ownerDescriptor
) {
-
- PsiClass psiClass = scopeData.getPsiClass();
-
NamedMembers namedMembers = scopeData.getMembersCache().get(fieldName);
if (namedMembers == null) {
return Collections.emptySet();
}
- //noinspection ConstantConditions
- String qualifiedName = psiClass == null ? scopeData.getPsiPackage().getQualifiedName() : psiClass.getQualifiedName();
return resolveNamedGroupProperties(ownerDescriptor, scopeData, namedMembers, fieldName,
- "class or namespace " + qualifiedName);
+ "class or namespace " + DescriptorUtils.getFQName(ownerDescriptor));
}
@NotNull
|
e5ad59a45147e27b7c2a7a7aed9272f87a5d4746
|
intellij-community
|
WI-31168 Unable to save settings after upgrade- to 2016.1 (cherry picked from commit 5b98073)--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/platform-impl/src/com/intellij/remote/RemoteCredentialsHolder.java b/platform/platform-impl/src/com/intellij/remote/RemoteCredentialsHolder.java
index c437aeda7a4b8..1d9b61d4a782f 100644
--- a/platform/platform-impl/src/com/intellij/remote/RemoteCredentialsHolder.java
+++ b/platform/platform-impl/src/com/intellij/remote/RemoteCredentialsHolder.java
@@ -176,6 +176,7 @@ public void setUseKeyPair(boolean useKeyPair) {
myUseKeyPair = useKeyPair;
}
+ @NotNull
public String getSerializedUserName() {
if (myAnonymous || myUserName == null) return "";
return myUserName;
@@ -268,7 +269,7 @@ public void load(Element element) {
public void save(Element rootElement) {
rootElement.setAttribute(HOST, StringUtil.notNullize(getHost()));
- rootElement.setAttribute(PORT, getLiteralPort());
+ rootElement.setAttribute(PORT, StringUtil.notNullize(getLiteralPort()));
rootElement.setAttribute(ANONYMOUS, Boolean.toString(isAnonymous()));
rootElement.setAttribute(USERNAME, getSerializedUserName());
rootElement.setAttribute(PASSWORD, getSerializedPassword());
|
ceddce542a8dea4d4a647e8c15598fe37393ebb3
|
hbase
|
HBASE-3591 completebulkload doesn't honor generic- -D options--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1076709 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 9b38023f1076..9aa14cf0184f 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -130,6 +130,7 @@ Release 0.90.2 - Unreleased
HBASE-3572 memstore lab can leave half inited data structs (bad!)
HBASE-3589 test jar should not include mapred-queues.xml and
log4j.properties
+ HBASE-3591 completebulkload doesn't honor generic -D options
IMPROVEMENTS
HBASE-3542 MultiGet methods in Thrift
diff --git a/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 88c4b2f75f91..cdc3d91cefc8 100644
--- a/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -308,7 +308,7 @@ public int run(String[] args) throws Exception {
}
Path hfofDir = new Path(args[0]);
- HTable table = new HTable(args[1]);
+ HTable table = new HTable(this.getConf(), args[1]);
doBulkLoad(hfofDir, table);
return 0;
|
0dd151142d6a2d2778117905567f203d493268e6
|
kotlin
|
KT-3639 Can't find sources of AbstractIterator- when navigating from stack strace-- - fixed-
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/caches/resolve/IDELightClassGenerationSupport.java b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/caches/resolve/IDELightClassGenerationSupport.java
index edd85586c6b26..68d67345ef64c 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/caches/resolve/IDELightClassGenerationSupport.java
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/caches/resolve/IDELightClassGenerationSupport.java
@@ -350,18 +350,20 @@ private static KotlinLightClassForDecompiledDeclaration createLightClassForDecom
return null;
}
- ClsClassImpl javaClsClass = createClsJavaClassFromVirtualFile(file, virtualFile);
+ JetClassOrObject classOrObject = singleOrNull(filterIsInstance(file.getDeclarations(), JetClassOrObject.class));
+
+ ClsClassImpl javaClsClass = createClsJavaClassFromVirtualFile(file, virtualFile, classOrObject);
if (javaClsClass == null) {
return null;
}
- JetClassOrObject declaration = singleOrNull(filterIsInstance(file.getDeclarations(), JetClassOrObject.class));
- return new KotlinLightClassForDecompiledDeclaration(javaClsClass, declaration);
+ return new KotlinLightClassForDecompiledDeclaration(javaClsClass, classOrObject);
}
@Nullable
private static ClsClassImpl createClsJavaClassFromVirtualFile(
@NotNull final JetFile decompiledKotlinFile,
- @NotNull VirtualFile virtualFile
+ @NotNull VirtualFile virtualFile,
+ @Nullable final JetClassOrObject decompiledClassOrObject
) {
final PsiJavaFileStubImpl javaFileStub = getOrCreateJavaFileStub(virtualFile);
if (javaFileStub == null) {
@@ -369,6 +371,15 @@ private static ClsClassImpl createClsJavaClassFromVirtualFile(
}
PsiManager manager = PsiManager.getInstance(decompiledKotlinFile.getProject());
ClsFileImpl fakeFile = new ClsFileImpl((PsiManagerImpl) manager, new ClassFileViewProvider(manager, virtualFile)) {
+ @NotNull
+ @Override
+ public PsiElement getNavigationElement() {
+ if (decompiledClassOrObject != null) {
+ return decompiledClassOrObject.getNavigationElement().getContainingFile();
+ }
+ return super.getNavigationElement();
+ }
+
@NotNull
@Override
public PsiClassHolderFileStub getStub() {
diff --git a/idea/testData/filters/exceptionFilter/librarySources/dummy.txt b/idea/testData/filters/exceptionFilter/librarySources/dummy.txt
new file mode 100644
index 0000000000000..ab5e29d0a96b6
--- /dev/null
+++ b/idea/testData/filters/exceptionFilter/librarySources/dummy.txt
@@ -0,0 +1 @@
+This test doesn't have sources, but IDEA test framework needs sources dir
\ No newline at end of file
diff --git a/idea/testData/filters/exceptionFilter/mockLibrary/lib.kt b/idea/testData/filters/exceptionFilter/mockLibrary/lib.kt
new file mode 100644
index 0000000000000..a4fbc9800edda
--- /dev/null
+++ b/idea/testData/filters/exceptionFilter/mockLibrary/lib.kt
@@ -0,0 +1,10 @@
+package test
+
+class Foo {
+ fun member() {
+ }
+}
+
+fun foo() {
+ println()
+}
\ No newline at end of file
diff --git a/idea/testData/filters/exceptionFilter/mockLibrary/other.kt b/idea/testData/filters/exceptionFilter/mockLibrary/other.kt
new file mode 100644
index 0000000000000..2a10208ec7a6e
--- /dev/null
+++ b/idea/testData/filters/exceptionFilter/mockLibrary/other.kt
@@ -0,0 +1,5 @@
+package test
+
+fun other() {
+ println("")
+}
\ No newline at end of file
diff --git a/idea/tests/org/jetbrains/kotlin/idea/filters/JetExceptionFilterTest.kt b/idea/tests/org/jetbrains/kotlin/idea/filters/JetExceptionFilterTest.kt
index 41d7b79c2d3ad..69f5885a0bddc 100644
--- a/idea/tests/org/jetbrains/kotlin/idea/filters/JetExceptionFilterTest.kt
+++ b/idea/tests/org/jetbrains/kotlin/idea/filters/JetExceptionFilterTest.kt
@@ -16,10 +16,17 @@
package org.jetbrains.kotlin.idea.filters
+import com.intellij.execution.filters.FileHyperlinkInfo
import com.intellij.execution.filters.OpenFileHyperlinkInfo
+import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.fileEditor.FileDocumentManager
+import com.intellij.openapi.roots.ModifiableRootModel
+import com.intellij.openapi.roots.ModuleRootManager
+import com.intellij.openapi.roots.OrderRootType
+import com.intellij.openapi.util.io.FileUtilRt
import com.intellij.openapi.vfs.VfsUtilCore
import com.intellij.openapi.vfs.VirtualFile
+import com.intellij.openapi.vfs.VirtualFileManager
import com.intellij.psi.PsiDocumentManager
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.refactoring.MultiFileTestCase
@@ -32,6 +39,7 @@ import org.jetbrains.kotlin.load.kotlin.PackageClassUtils.getPackageClassFqName
import org.jetbrains.kotlin.load.kotlin.PackageClassUtils.getPackageClassName
import org.jetbrains.kotlin.load.kotlin.PackagePartClassUtils.getPackagePartFqName
import org.jetbrains.kotlin.name.FqName
+import org.jetbrains.kotlin.test.MockLibraryUtil
import java.io.File
public class JetExceptionFilterTest : MultiFileTestCase() {
@@ -64,7 +72,7 @@ public class JetExceptionFilterTest : MultiFileTestCase() {
return prefix + element + "\n"
}
- private fun doTest(fileName: String, lineNumber: Int, className: (VirtualFile) -> String, linePrefix: String = "\tat ") {
+ private fun doTest(relativePath: String, lineNumber: Int, className: (VirtualFile) -> String, linePrefix: String = "\tat ", libRootUrl: String? = null) {
if (rootDir == null) {
configure()
}
@@ -72,17 +80,22 @@ public class JetExceptionFilterTest : MultiFileTestCase() {
val filter = JetExceptionFilterFactory().create(GlobalSearchScope.allScope(myProject))
- val expectedFile = VfsUtilCore.findRelativeFile(fileName, rootDir)
+ val expectedFile = if (libRootUrl != null) {
+ VirtualFileManager.getInstance().findFileByUrl(libRootUrl + relativePath);
+ }
+ else {
+ VfsUtilCore.findRelativeFile(relativePath, rootDir);
+ }
TestCase.assertNotNull(expectedFile)
- val line = createStackTraceElementLine(linePrefix, fileName, className.invoke(expectedFile), lineNumber)
+ val line = createStackTraceElementLine(linePrefix, relativePath, className(expectedFile), lineNumber)
val result = filter.applyFilter(line, 0)
TestCase.assertNotNull(result)
val info = result.getFirstHyperlinkInfo()
TestCase.assertNotNull(info)
- UsefulTestCase.assertInstanceOf(info, javaClass<OpenFileHyperlinkInfo>())
- val descriptor = (info as OpenFileHyperlinkInfo).getDescriptor()
+ info as FileHyperlinkInfo
+ val descriptor = info.getDescriptor()
TestCase.assertNotNull(descriptor)
TestCase.assertEquals(expectedFile, descriptor.getFile())
@@ -113,4 +126,27 @@ public class JetExceptionFilterTest : MultiFileTestCase() {
doTest("1/foo.kt", 4, { file -> "" + getPackagePartFqName(packageClassFqName, file) + "\$foo\$f\$1" })
doTest("2/foo.kt", 4, { file -> "" + getPackagePartFqName(packageClassFqName, file) + "\$foo\$f\$1" })
}
+
+ public fun testLibrarySources() {
+ val mockLibrary = MockLibraryUtil.compileLibraryToJar(getTestDataPath() + getTestRoot() + "mockLibrary", "mockLibrary", true)
+
+ val libRootUrl = "jar://" + FileUtilRt.toSystemIndependentName(mockLibrary.getAbsolutePath()) + "!/"
+
+ ApplicationManager.getApplication().runWriteAction {
+ val moduleModel = ModuleRootManager.getInstance(myModule).getModifiableModel()
+ with(moduleModel.getModuleLibraryTable().getModifiableModel().createLibrary("mockLibrary").getModifiableModel()) {
+ addRoot(libRootUrl, OrderRootType.CLASSES)
+ addRoot(libRootUrl + "src/", OrderRootType.SOURCES)
+ commit()
+ }
+ moduleModel.commit()
+ }
+
+ val packageClassFqName = FqName("test.TestPackage")
+
+ doTest("src/lib.kt", 3, { "test.Foo" }, libRootUrl = libRootUrl)
+ doTest("src/lib.kt", 4, { "test.Foo" }, libRootUrl = libRootUrl)
+ doTest("src/lib.kt", 9, { "" + getPackagePartFqName(packageClassFqName, it) }, libRootUrl = libRootUrl)
+ doTest("src/other.kt", 4, { "" + getPackagePartFqName(packageClassFqName, it) }, libRootUrl = libRootUrl)
+ }
}
|
602c63d2aa3936a766e4e3432721812096ed54f5
|
elasticsearch
|
pass on node seed to the node level settings in- TestCluster--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/test/java/org/elasticsearch/test/TestCluster.java b/src/test/java/org/elasticsearch/test/TestCluster.java
index 15092378d4d8c..2504358499846 100644
--- a/src/test/java/org/elasticsearch/test/TestCluster.java
+++ b/src/test/java/org/elasticsearch/test/TestCluster.java
@@ -99,6 +99,12 @@ public final class TestCluster implements Iterable<Client> {
*/
public static final String TESTS_ENABLE_MOCK_MODULES = "tests.enable_mock_modules";
+ /**
+ * A node level setting that holds a per node random seed that is consistent across node restarts
+ */
+ public static final String SETTING_CLUSTER_NODE_SEED = "test.cluster.node.seed";
+
+
private static final boolean ENABLE_MOCK_MODULES = systemPropertyAsBoolean(TESTS_ENABLE_MOCK_MODULES, true);
private static long clusterSeed() {
@@ -205,7 +211,8 @@ private static Settings getRandomNodeSettings(long seed, String clusterName) {
// decrease the routing schedule so new nodes will be added quickly - some random value between 30 and 80 ms
.put("cluster.routing.schedule", (30 + random.nextInt(50)) + "ms")
// default to non gateway
- .put("gateway.type", "none");
+ .put("gateway.type", "none")
+ .put(SETTING_CLUSTER_NODE_SEED, seed);
if (ENABLE_MOCK_MODULES && usually(random)) {
builder.put("index.store.type", MockFSIndexStoreModule.class.getName()); // no RAM dir for now!
builder.put(IndexEngineModule.EngineSettings.ENGINE_TYPE, MockEngineModule.class.getName());
|
03ef46a1cfe87d411243e5189a4906d0fce38237
|
orientdb
|
DbDeleteTest is made more stable.--
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/memory/eh/OExtendibleHashingTable.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/memory/eh/OExtendibleHashingTable.java
index 081583af4df..163d7c2a099 100755
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/memory/eh/OExtendibleHashingTable.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/memory/eh/OExtendibleHashingTable.java
@@ -58,27 +58,27 @@ public OExtendibleHashingTable() {
}
public boolean put(OPhysicalPosition value) {
- NodeInfo nodeInfo = getBucket(value.clusterPosition);
- long[] node = hashTree[nodeInfo.nodeIndex];
+ NodePath nodePath = getBucket(value.clusterPosition);
+ long[] node = hashTree[nodePath.nodeIndex];
- long filePosition = node[nodeInfo.itemIndex + nodeInfo.hashMapOffset];
+ long filePosition = node[nodePath.itemIndex + nodePath.hashMapOffset];
final OExtendibleHashingBucket bucket;
long newFilePosition;
if (filePosition == 0) {
- bucket = new OExtendibleHashingBucket(nodeInfo.nodeGlobalDepth);
+ bucket = new OExtendibleHashingBucket(nodePath.nodeGlobalDepth);
- final long nextBucketPos = nextBucket(new NodeInfo(nodeInfo.parent, nodeInfo.hashMapOffset, nodeInfo.itemIndex + 1,
- nodeInfo.nodeIndex, nodeInfo.nodeGlobalDepth));
+ final long nextBucketPos = nextBucket(new NodePath(nodePath.parent, nodePath.hashMapOffset, nodePath.itemIndex + 1,
+ nodePath.nodeIndex, nodePath.nodeGlobalDepth));
bucket.setNextBucket(nextBucketPos);
- final long prevBucketPos = prevBucket(new NodeInfo(nodeInfo.parent, nodeInfo.hashMapOffset, nodeInfo.itemIndex - 1,
- nodeInfo.nodeIndex, nodeInfo.nodeGlobalDepth));
+ final long prevBucketPos = prevBucket(new NodePath(nodePath.parent, nodePath.hashMapOffset, nodePath.itemIndex - 1,
+ nodePath.nodeIndex, nodePath.nodeGlobalDepth));
bucket.setPrevBucket(prevBucketPos);
file.add(bucket);
newFilePosition = file.size();
- node[nodeInfo.itemIndex + nodeInfo.hashMapOffset] = newFilePosition;
+ node[nodePath.itemIndex + nodePath.hashMapOffset] = newFilePosition;
if (nextBucketPos > 0) {
final OExtendibleHashingBucket nextBucket = file.get((int) nextBucketPos - 1);
@@ -112,7 +112,7 @@ public boolean put(OPhysicalPosition value) {
newFilePosition = file.size();
- if (((nodeInfo.itemIndex >>> (64 - bucketDepth + 1)) & 1) == 0) {
+ if (((nodePath.itemIndex >>> (64 - bucketDepth + 1)) & 1) == 0) {
final long oldNextBucketPosition = bucket.getNextBucket();
bucket.setNextBucket(newFilePosition);
@@ -142,13 +142,13 @@ public boolean put(OPhysicalPosition value) {
assert checkFileOrder();
- if (bucketDepth <= nodeInfo.nodeGlobalDepth) {
- updateNodeAfterSplit(nodeInfo, bucketDepth, newFilePosition);
+ if (bucketDepth <= nodePath.nodeGlobalDepth) {
+ updateNodeAfterSplit(nodePath, bucketDepth, newFilePosition);
} else {
- if (nodeLocalDepths[nodeInfo.nodeIndex] < MAX_LEVEL_DEPTH) {
- final long[] newNode = splitNode(nodeInfo, node);
+ if (nodeLocalDepths[nodePath.nodeIndex] < MAX_LEVEL_DEPTH) {
+ final long[] newNode = splitNode(nodePath, node);
- final int nodeLocalDepth = nodeLocalDepths[nodeInfo.nodeIndex];
+ final int nodeLocalDepth = nodeLocalDepths[nodePath.nodeIndex];
final int hashMapSize = 1 << nodeLocalDepth;
boolean allHashMapsEquals = checkAllMapsContainSameBucket(newNode, hashMapSize);
@@ -158,27 +158,27 @@ public boolean put(OPhysicalPosition value) {
newNodeIndex = addNewNode(newNode, nodeLocalDepth);
}
- updateNodesAfterSplit(nodeInfo, newNode, nodeLocalDepth, hashMapSize, allHashMapsEquals, newNodeIndex);
+ updateNodesAfterSplit(nodePath, newNode, nodeLocalDepth, hashMapSize, allHashMapsEquals, newNodeIndex);
- final int newIndex = nodeInfo.itemIndex << 1;
- final int newOffset = nodeInfo.hashMapOffset << 1;
- final int newGlobalDepth = nodeInfo.nodeGlobalDepth + 1;
+ final int newIndex = nodePath.itemIndex << 1;
+ final int newOffset = nodePath.hashMapOffset << 1;
+ final int newGlobalDepth = nodePath.nodeGlobalDepth + 1;
if (newOffset < MAX_LEVEL_SIZE) {
- final NodeInfo updatedNodeInfo = new NodeInfo(nodeInfo.parent, newOffset, newIndex, nodeInfo.nodeIndex, newGlobalDepth);
- updateNodeAfterSplit(updatedNodeInfo, bucketDepth, newFilePosition);
+ final NodePath updatedNodePath = new NodePath(nodePath.parent, newOffset, newIndex, nodePath.nodeIndex, newGlobalDepth);
+ updateNodeAfterSplit(updatedNodePath, bucketDepth, newFilePosition);
} else {
- final NodeInfo newNodeInfo;
+ final NodePath newNodePath;
if (!allHashMapsEquals) {
- newNodeInfo = new NodeInfo(nodeInfo.parent, newOffset - MAX_LEVEL_SIZE, newIndex, newNodeIndex, newGlobalDepth);
+ newNodePath = new NodePath(nodePath.parent, newOffset - MAX_LEVEL_SIZE, newIndex, newNodeIndex, newGlobalDepth);
} else {
- newNodeInfo = nodeInfo.parent;
+ newNodePath = nodePath.parent;
}
- updateNodeAfterSplit(newNodeInfo, bucketDepth, newFilePosition);
+ updateNodeAfterSplit(newNodePath, bucketDepth, newFilePosition);
}
} else {
- addNewLevelNode(nodeInfo, node, newFilePosition);
+ addNewLevelNode(nodePath, node, newFilePosition);
}
}
@@ -186,8 +186,8 @@ public boolean put(OPhysicalPosition value) {
}
public boolean contains(OClusterPosition clusterPosition) {
- NodeInfo nodeInfo = getBucket(clusterPosition);
- long position = hashTree[nodeInfo.nodeIndex][nodeInfo.itemIndex + nodeInfo.hashMapOffset];
+ NodePath nodePath = getBucket(clusterPosition);
+ long position = hashTree[nodePath.nodeIndex][nodePath.itemIndex + nodePath.hashMapOffset];
if (position == 0)
return false;
@@ -197,8 +197,8 @@ public boolean contains(OClusterPosition clusterPosition) {
}
public OPhysicalPosition delete(OClusterPosition clusterPosition) {
- final NodeInfo nodeInfo = getBucket(clusterPosition);
- final long position = hashTree[nodeInfo.nodeIndex][nodeInfo.itemIndex + nodeInfo.hashMapOffset];
+ final NodePath nodePath = getBucket(clusterPosition);
+ final long position = hashTree[nodePath.nodeIndex][nodePath.itemIndex + nodePath.hashMapOffset];
final OExtendibleHashingBucket bucket = file.get((int) position - 1);
final int positionIndex = bucket.getPosition(clusterPosition);
if (positionIndex < 0)
@@ -208,38 +208,38 @@ public OPhysicalPosition delete(OClusterPosition clusterPosition) {
if (bucket.size() > 0)
return removedPosition;
- mergeNodesAfterDeletion(nodeInfo, bucket, position);
+ mergeNodesAfterDeletion(nodePath, bucket, position);
assert checkFileOrder();
- if (nodeInfo.parent != null) {
- final int hashMapSize = 1 << nodeLocalDepths[nodeInfo.nodeIndex];
+ if (nodePath.parent != null) {
+ final int hashMapSize = 1 << nodeLocalDepths[nodePath.nodeIndex];
- final long[] node = hashTree[nodeInfo.nodeIndex];
+ final long[] node = hashTree[nodePath.nodeIndex];
final boolean allMapsContainSameBucket = checkAllMapsContainSameBucket(node, hashMapSize);
if (allMapsContainSameBucket)
- mergeNodeToParent(node, nodeInfo);
+ mergeNodeToParent(node, nodePath);
}
return removedPosition;
}
- private void mergeNodeToParent(long[] node, NodeInfo nodeInfo) {
- final long[] parentNode = hashTree[nodeInfo.parent.nodeIndex];
+ private void mergeNodeToParent(long[] node, NodePath nodePath) {
+ final long[] parentNode = hashTree[nodePath.parent.nodeIndex];
int startIndex = -1;
for (int i = 0; i < parentNode.length; i++)
- if (parentNode[i] < 0 && (parentNode[i] & Long.MAX_VALUE) >>> 8 == nodeInfo.nodeIndex) {
+ if (parentNode[i] < 0 && (parentNode[i] & Long.MAX_VALUE) >>> 8 == nodePath.nodeIndex) {
startIndex = i;
break;
}
- final int hashMapSize = 1 << nodeLocalDepths[nodeInfo.nodeIndex];
+ final int hashMapSize = 1 << nodeLocalDepths[nodePath.nodeIndex];
for (int i = 0, k = startIndex; i < node.length; i += hashMapSize, k++) {
parentNode[k] = node[i];
}
- deleteNode(nodeInfo.nodeIndex);
+ deleteNode(nodePath.nodeIndex);
}
private void deleteNode(int nodeIndex) {
@@ -259,15 +259,15 @@ private void deleteNode(int nodeIndex) {
}
}
- private void mergeNodesAfterDeletion(NodeInfo nodeInfo, OExtendibleHashingBucket bucket, long filePosition) {
+ private void mergeNodesAfterDeletion(NodePath nodePath, OExtendibleHashingBucket bucket, long filePosition) {
final int bucketDepth = bucket.getDepth();
- int offset = nodeInfo.nodeGlobalDepth - (bucketDepth - 1);
- NodeInfo currentNode = nodeInfo;
- int nodeLocalDepth = nodeLocalDepths[nodeInfo.nodeIndex];
+ int offset = nodePath.nodeGlobalDepth - (bucketDepth - 1);
+ NodePath currentNode = nodePath;
+ int nodeLocalDepth = nodeLocalDepths[nodePath.nodeIndex];
while (offset > 0) {
offset -= nodeLocalDepth;
if (offset > 0) {
- currentNode = nodeInfo.parent;
+ currentNode = nodePath.parent;
nodeLocalDepth = nodeLocalDepths[currentNode.nodeIndex];
}
}
@@ -354,10 +354,10 @@ private void mergeNodesAfterDeletion(NodeInfo nodeInfo, OExtendibleHashingBucket
assert checkBucketDepth(buddyBucket);
}
- private long nextBucket(NodeInfo nodeInfo) {
- nextBucketLoop: while (nodeInfo != null) {
- final long[] node = hashTree[nodeInfo.nodeIndex];
- final int startIndex = nodeInfo.itemIndex + nodeInfo.hashMapOffset;
+ private long nextBucket(NodePath nodePath) {
+ nextBucketLoop: while (nodePath != null) {
+ final long[] node = hashTree[nodePath.nodeIndex];
+ final int startIndex = nodePath.itemIndex + nodePath.hashMapOffset;
final int endIndex = MAX_LEVEL_SIZE;
for (int i = startIndex; i < endIndex; i++) {
@@ -369,44 +369,44 @@ private long nextBucket(NodeInfo nodeInfo) {
final int childNodeIndex = (int) ((position & Long.MAX_VALUE) >> 8);
final int childItemOffset = (int) position & 0xFF;
- final NodeInfo parent = new NodeInfo(nodeInfo.parent, 0, i, nodeInfo.nodeIndex, -1);
- nodeInfo = new NodeInfo(parent, childItemOffset, 0, childNodeIndex, -1);
+ final NodePath parent = new NodePath(nodePath.parent, 0, i, nodePath.nodeIndex, -1);
+ nodePath = new NodePath(parent, childItemOffset, 0, childNodeIndex, -1);
continue nextBucketLoop;
}
}
- nodeInfo = nextLevelUp(nodeInfo);
+ nodePath = nextLevelUp(nodePath);
}
return 0;
}
- private NodeInfo nextLevelUp(NodeInfo nodeInfo) {
- if (nodeInfo.parent == null)
+ private NodePath nextLevelUp(NodePath nodePath) {
+ if (nodePath.parent == null)
return null;
- final int nodeLocalDepth = nodeLocalDepths[nodeInfo.nodeIndex];
+ final int nodeLocalDepth = nodeLocalDepths[nodePath.nodeIndex];
final int pointersSize = 1 << (MAX_LEVEL_DEPTH - nodeLocalDepth);
- final NodeInfo parent = nodeInfo.parent;
+ final NodePath parent = nodePath.parent;
if (parent.itemIndex < MAX_LEVEL_SIZE / 2) {
final int nextParentIndex = (parent.itemIndex / pointersSize + 1) * pointersSize;
- return new NodeInfo(parent.parent, 0, nextParentIndex, parent.nodeIndex, parent.nodeGlobalDepth);
+ return new NodePath(parent.parent, 0, nextParentIndex, parent.nodeIndex, parent.nodeGlobalDepth);
}
- final int nextParentIndex = ((nodeInfo.parent.itemIndex - MAX_LEVEL_SIZE / 2) / pointersSize + 1) * pointersSize;
+ final int nextParentIndex = ((nodePath.parent.itemIndex - MAX_LEVEL_SIZE / 2) / pointersSize + 1) * pointersSize;
if (nextParentIndex < MAX_LEVEL_SIZE)
- return new NodeInfo(parent.parent, 0, nextParentIndex, parent.nodeIndex, parent.nodeGlobalDepth);
+ return new NodePath(parent.parent, 0, nextParentIndex, parent.nodeIndex, parent.nodeGlobalDepth);
- return nextLevelUp(new NodeInfo(parent.parent, 0, MAX_LEVEL_SIZE - 1, parent.nodeIndex, parent.nodeGlobalDepth));
+ return nextLevelUp(new NodePath(parent.parent, 0, MAX_LEVEL_SIZE - 1, parent.nodeIndex, parent.nodeGlobalDepth));
}
- private long prevBucket(NodeInfo nodeInfo) {
- prevBucketLoop: while (nodeInfo != null) {
- final long[] node = hashTree[nodeInfo.nodeIndex];
+ private long prevBucket(NodePath nodePath) {
+ prevBucketLoop: while (nodePath != null) {
+ final long[] node = hashTree[nodePath.nodeIndex];
final int startIndex = 0;
- final int endIndex = nodeInfo.itemIndex + nodeInfo.hashMapOffset;
+ final int endIndex = nodePath.itemIndex + nodePath.hashMapOffset;
for (int i = endIndex; i >= startIndex; i--) {
final long position = node[i];
@@ -419,37 +419,37 @@ private long prevBucket(NodeInfo nodeInfo) {
final int localDepth = nodeLocalDepths[childNodeIndex];
final int endChildIndex = 1 << localDepth - 1;
- final NodeInfo parent = new NodeInfo(nodeInfo.parent, 0, i, nodeInfo.nodeIndex, -1);
- nodeInfo = new NodeInfo(parent, childItemOffset, endChildIndex, childNodeIndex, -1);
+ final NodePath parent = new NodePath(nodePath.parent, 0, i, nodePath.nodeIndex, -1);
+ nodePath = new NodePath(parent, childItemOffset, endChildIndex, childNodeIndex, -1);
continue prevBucketLoop;
}
}
- nodeInfo = prevLevelUp(nodeInfo);
+ nodePath = prevLevelUp(nodePath);
}
return 0;
}
- private NodeInfo prevLevelUp(NodeInfo nodeInfo) {
- if (nodeInfo.parent == null)
+ private NodePath prevLevelUp(NodePath nodePath) {
+ if (nodePath.parent == null)
return null;
- final int nodeLocalDepth = nodeLocalDepths[nodeInfo.nodeIndex];
+ final int nodeLocalDepth = nodeLocalDepths[nodePath.nodeIndex];
final int pointersSize = 1 << (MAX_LEVEL_DEPTH - nodeLocalDepth);
- final NodeInfo parent = nodeInfo.parent;
+ final NodePath parent = nodePath.parent;
if (parent.itemIndex > MAX_LEVEL_SIZE / 2) {
- final int prevParentIndex = ((nodeInfo.parent.itemIndex - MAX_LEVEL_SIZE / 2) / pointersSize) * pointersSize - 1;
- return new NodeInfo(parent.parent, 0, prevParentIndex, parent.nodeIndex, -1);
+ final int prevParentIndex = ((nodePath.parent.itemIndex - MAX_LEVEL_SIZE / 2) / pointersSize) * pointersSize - 1;
+ return new NodePath(parent.parent, 0, prevParentIndex, parent.nodeIndex, -1);
}
final int prevParentIndex = (parent.itemIndex / pointersSize) * pointersSize - 1;
if (prevParentIndex >= 0)
- return new NodeInfo(parent.parent, 0, prevParentIndex, parent.nodeIndex, -1);
+ return new NodePath(parent.parent, 0, prevParentIndex, parent.nodeIndex, -1);
- return prevLevelUp(new NodeInfo(parent.parent, 0, 0, parent.nodeIndex, -1));
+ return prevLevelUp(new NodePath(parent.parent, 0, 0, parent.nodeIndex, -1));
}
public void clear() {
@@ -474,8 +474,8 @@ public long size() {
}
public OPhysicalPosition get(OClusterPosition clusterPosition) {
- NodeInfo nodeInfo = getBucket(clusterPosition);
- long position = hashTree[nodeInfo.nodeIndex][nodeInfo.itemIndex + nodeInfo.hashMapOffset];
+ NodePath nodePath = getBucket(clusterPosition);
+ long position = hashTree[nodePath.nodeIndex][nodePath.itemIndex + nodePath.hashMapOffset];
if (position == 0)
return null;
@@ -485,10 +485,10 @@ public OPhysicalPosition get(OClusterPosition clusterPosition) {
}
public Entry[] higherEntries(OClusterPosition key) {
- final NodeInfo nodeInfo = getBucket(key);
- long position = hashTree[nodeInfo.nodeIndex][nodeInfo.itemIndex + nodeInfo.hashMapOffset];
+ final NodePath nodePath = getBucket(key);
+ long position = hashTree[nodePath.nodeIndex][nodePath.itemIndex + nodePath.hashMapOffset];
if (position == 0)
- position = nextBucket(nodeInfo);
+ position = nextBucket(nodePath);
if (position == 0)
return new Entry[0];
@@ -518,10 +518,10 @@ public Entry[] higherEntries(OClusterPosition key) {
}
public Entry[] ceilingEntries(OClusterPosition key) {
- final NodeInfo nodeInfo = getBucket(key);
- long position = hashTree[nodeInfo.nodeIndex][nodeInfo.itemIndex + nodeInfo.hashMapOffset];
+ final NodePath nodePath = getBucket(key);
+ long position = hashTree[nodePath.nodeIndex][nodePath.itemIndex + nodePath.hashMapOffset];
if (position == 0)
- position = nextBucket(nodeInfo);
+ position = nextBucket(nodePath);
if (position == 0)
return new Entry[0];
@@ -551,10 +551,10 @@ public Entry[] ceilingEntries(OClusterPosition key) {
}
public Entry[] lowerEntries(OClusterPosition key) {
- final NodeInfo nodeInfo = getBucket(key);
- long position = hashTree[nodeInfo.nodeIndex][nodeInfo.itemIndex + nodeInfo.hashMapOffset];
+ final NodePath nodePath = getBucket(key);
+ long position = hashTree[nodePath.nodeIndex][nodePath.itemIndex + nodePath.hashMapOffset];
if (position == 0)
- position = prevBucket(nodeInfo);
+ position = prevBucket(nodePath);
if (position == 0)
return new Entry[0];
@@ -585,10 +585,10 @@ public Entry[] lowerEntries(OClusterPosition key) {
}
public Entry[] floorEntries(OClusterPosition key) {
- final NodeInfo nodeInfo = getBucket(key);
- long position = hashTree[nodeInfo.nodeIndex][nodeInfo.itemIndex + nodeInfo.hashMapOffset];
+ final NodePath nodePath = getBucket(key);
+ long position = hashTree[nodePath.nodeIndex][nodePath.itemIndex + nodePath.hashMapOffset];
if (position == 0)
- position = prevBucket(nodeInfo);
+ position = prevBucket(nodePath);
if (position == 0)
return new Entry[0];
@@ -644,14 +644,14 @@ private Entry[] convertBucketToEntries(final OExtendibleHashingBucket bucket, in
return entries;
}
- private void addNewLevelNode(NodeInfo nodeInfo, long[] node, long newFilePosition) {
+ private void addNewLevelNode(NodePath nodePath, long[] node, long newFilePosition) {
final long[] newNode = new long[MAX_LEVEL_SIZE];
final int newNodeDepth;
final int newNodeStartIndex;
final int mapInterval;
- if (nodeInfo.itemIndex < node.length / 2) {
+ if (nodePath.itemIndex < node.length / 2) {
final int maxDepth = getMaxLevelDepth(node, 0, node.length / 2);
if (maxDepth > 0)
newNodeDepth = maxDepth;
@@ -659,7 +659,7 @@ private void addNewLevelNode(NodeInfo nodeInfo, long[] node, long newFilePositio
newNodeDepth = 1;
mapInterval = 1 << (MAX_LEVEL_DEPTH - newNodeDepth);
- newNodeStartIndex = (nodeInfo.itemIndex / mapInterval) * mapInterval;
+ newNodeStartIndex = (nodePath.itemIndex / mapInterval) * mapInterval;
} else {
final int maxDepth = getMaxLevelDepth(node, node.length / 2, node.length);
if (maxDepth > 0)
@@ -668,7 +668,7 @@ private void addNewLevelNode(NodeInfo nodeInfo, long[] node, long newFilePositio
newNodeDepth = 1;
mapInterval = 1 << (MAX_LEVEL_DEPTH - newNodeDepth);
- newNodeStartIndex = ((nodeInfo.itemIndex - node.length / 2) / mapInterval) * mapInterval + node.length / 2;
+ newNodeStartIndex = ((nodePath.itemIndex - node.length / 2) / mapInterval) * mapInterval + node.length / 2;
}
final int newNodeIndex = addNewNode(newNode, newNodeDepth);
@@ -676,7 +676,7 @@ private void addNewLevelNode(NodeInfo nodeInfo, long[] node, long newFilePositio
for (int i = 0; i < mapInterval; i++) {
final int nodeOffset = i + newNodeStartIndex;
final long position = node[nodeOffset];
- if (nodeOffset != nodeInfo.itemIndex) {
+ if (nodeOffset != nodePath.itemIndex) {
for (int n = i << newNodeDepth; n < (i + 1) << newNodeDepth; n++)
newNode[n] = position;
} else {
@@ -712,19 +712,19 @@ private int getMaxLevelDepth(long node[], int start, int end) {
return maxDepth;
}
- private void updateNodesAfterSplit(NodeInfo nodeInfo, long[] newNode, int nodeLocalDepth, int hashMapSize,
+ private void updateNodesAfterSplit(NodePath nodePath, long[] newNode, int nodeLocalDepth, int hashMapSize,
boolean allHashMapsEquals, int newNodeIndex) {
- final long[] parentNode = hashTree[nodeInfo.parent.nodeIndex];
+ final long[] parentNode = hashTree[nodePath.parent.nodeIndex];
int startIndex = -1;
for (int i = 0; i < parentNode.length; i++)
- if (parentNode[i] < 0 && (parentNode[i] & Long.MAX_VALUE) >>> 8 == nodeInfo.nodeIndex) {
+ if (parentNode[i] < 0 && (parentNode[i] & Long.MAX_VALUE) >>> 8 == nodePath.nodeIndex) {
startIndex = i;
break;
}
final int pointersSize = 1 << (MAX_LEVEL_DEPTH - nodeLocalDepth);
for (int i = 0; i < pointersSize; i++) {
- parentNode[startIndex + i] = (nodeInfo.nodeIndex << 8) | (i * hashMapSize) | Long.MIN_VALUE;
+ parentNode[startIndex + i] = (nodePath.nodeIndex << 8) | (i * hashMapSize) | Long.MIN_VALUE;
}
if (allHashMapsEquals) {
@@ -777,7 +777,7 @@ private boolean assertAllNodesAreFilePointers(boolean allHashMapsEquals, long[]
return true;
}
- private long[] splitNode(NodeInfo nodeInfo, long[] node) {
+ private long[] splitNode(NodePath nodePath, long[] node) {
final long[] newNode = new long[MAX_LEVEL_SIZE];
for (int i = MAX_LEVEL_SIZE / 2; i < MAX_LEVEL_SIZE; i++) {
@@ -795,8 +795,8 @@ private long[] splitNode(NodeInfo nodeInfo, long[] node) {
updatedNode[2 * i + 1] = position;
}
- nodeLocalDepths[nodeInfo.nodeIndex]++;
- hashTree[nodeInfo.nodeIndex] = updatedNode;
+ nodeLocalDepths[nodePath.nodeIndex]++;
+ hashTree[nodePath.nodeIndex] = updatedNode;
return newNode;
}
@@ -835,9 +835,9 @@ private int addNewNode(long[] newNode, int nodeLocalDepth) {
return hashTreeSize - 1;
}
- private void updateNodeAfterSplit(NodeInfo info, int bucketDepth, long newFilePosition) {
+ private void updateNodeAfterSplit(NodePath info, int bucketDepth, long newFilePosition) {
int offset = info.nodeGlobalDepth - (bucketDepth - 1);
- NodeInfo currentNode = info;
+ NodePath currentNode = info;
int nodeLocalDepth = nodeLocalDepths[info.nodeIndex];
while (offset > 0) {
offset -= nodeLocalDepth;
@@ -877,16 +877,16 @@ private void updateBucket(int nodeIndex, int itemIndex, int offset, long newFile
}
}
- private NodeInfo getBucket(final OClusterPosition key) {
+ private NodePath getBucket(final OClusterPosition key) {
return getBucket(key, null);
}
- private NodeInfo getBucket(final OClusterPosition key, NodeInfo startNode) {
+ private NodePath getBucket(final OClusterPosition key, NodePath startNode) {
final long hash = key.longValueHigh();
int nodeDepth;
int localNodeDepth;
- NodeInfo parentNode;
+ NodePath parentNode;
int nodeIndex;
int offset;
@@ -905,7 +905,7 @@ private NodeInfo getBucket(final OClusterPosition key, NodeInfo startNode) {
}
int index = (int) ((hash >>> (64 - nodeDepth)) & (LEVEL_MASK >>> (MAX_LEVEL_DEPTH - localNodeDepth)));
- NodeInfo currentNode = new NodeInfo(parentNode, 0, index, 0, nodeDepth);
+ NodePath currentNode = new NodePath(parentNode, 0, index, 0, nodeDepth);
do {
final long position = hashTree[nodeIndex][index + offset];
if (position >= 0)
@@ -920,7 +920,7 @@ private NodeInfo getBucket(final OClusterPosition key, NodeInfo startNode) {
index = (int) ((hash >>> (64 - nodeDepth)) & (LEVEL_MASK >>> (MAX_LEVEL_DEPTH - localNodeDepth)));
parentNode = currentNode;
- currentNode = new NodeInfo(parentNode, offset, index, nodeIndex, nodeDepth);
+ currentNode = new NodePath(parentNode, offset, index, nodeIndex, nodeDepth);
} while (nodeDepth <= 64);
throw new IllegalStateException("Extendible hashing tree in corrupted state.");
@@ -972,7 +972,7 @@ private boolean checkFileOrder() {
if (size == 0)
return true;
- final long firstBucket = nextBucket(new NodeInfo(null, 0, 0, 0, MAX_LEVEL_DEPTH));
+ final long firstBucket = nextBucket(new NodePath(null, 0, 0, 0, MAX_LEVEL_DEPTH));
OExtendibleHashingBucket bucket = file.get((int) firstBucket - 1);
OClusterPosition lastPrevKey = null;
@@ -997,14 +997,14 @@ private boolean checkFileOrder() {
return true;
}
- private static final class NodeInfo {
- private final NodeInfo parent;
+ private static final class NodePath {
+ private final NodePath parent;
private final int hashMapOffset;
private final int itemIndex;
private final int nodeIndex;
private final int nodeGlobalDepth;
- private NodeInfo(NodeInfo parent, int hashMapOffset, int itemIndex, int nodeIndex, int nodeDepth) {
+ private NodePath(NodePath parent, int hashMapOffset, int itemIndex, int nodeIndex, int nodeDepth) {
this.parent = parent;
this.hashMapOffset = hashMapOffset;
this.itemIndex = itemIndex;
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DbDeleteTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DbDeleteTest.java
index 1b71faff274..a517943a4d0 100755
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DbDeleteTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DbDeleteTest.java
@@ -18,10 +18,6 @@
import java.io.File;
import java.io.IOException;
-import com.orientechnologies.orient.core.config.OGlobalConfiguration;
-import com.orientechnologies.orient.core.metadata.schema.OClass;
-import com.orientechnologies.orient.core.metadata.schema.OType;
-import com.orientechnologies.orient.core.record.impl.ODocument;
import org.testng.Assert;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
@@ -32,6 +28,9 @@
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.exception.ODatabaseException;
import com.orientechnologies.orient.core.exception.OStorageException;
+import com.orientechnologies.orient.core.metadata.schema.OClass;
+import com.orientechnologies.orient.core.metadata.schema.OType;
+import com.orientechnologies.orient.core.record.impl.ODocument;
@Test(groups = "db")
public class DbDeleteTest {
@@ -65,23 +64,23 @@ public void testDbDelete() throws IOException {
Assert.assertFalse(new File(testPath + "/" + DbImportExportTest.NEW_DB_PATH).exists());
}
- public void testDbDeleteWithIndex() {
- final ODatabaseDocument db = new ODatabaseDocumentTx("local:" + testPath + "target/testDbDeleteWithIndex");
- if (db.exists()) {
- db.open("admin", "admin");
- db.drop();
- }
+ public void testDbDeleteWithIndex() {
+ final ODatabaseDocument db = new ODatabaseDocumentTx("local:" + testPath + "core/target/testDbDeleteWithIndex");
+ if (db.exists()) {
+ db.open("admin", "admin");
+ db.drop();
+ }
- db.create();
+ db.create();
- final OClass indexedClass = db.getMetadata().getSchema().createClass("IndexedClass");
- indexedClass.createProperty("value", OType.STRING);
- indexedClass.createIndex("indexValue", OClass.INDEX_TYPE.UNIQUE, "value");
+ final OClass indexedClass = db.getMetadata().getSchema().createClass("IndexedClass");
+ indexedClass.createProperty("value", OType.STRING);
+ indexedClass.createIndex("indexValue", OClass.INDEX_TYPE.UNIQUE, "value");
- final ODocument document = new ODocument("IndexedClass");
- document.field("value", "value");
- document.save();
+ final ODocument document = new ODocument("IndexedClass");
+ document.field("value", "value");
+ document.save();
- db.drop();
- }
+ db.drop();
+ }
}
|
cb5df26bf792108e5063e99b4359b33cf7422f22
|
elasticsearch
|
lucene 4: use the proper token stream to return--
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/common/lucene/all/AllField.java b/src/main/java/org/elasticsearch/common/lucene/all/AllField.java
index 66d39ef57fefc..3fdff97ca6dfe 100644
--- a/src/main/java/org/elasticsearch/common/lucene/all/AllField.java
+++ b/src/main/java/org/elasticsearch/common/lucene/all/AllField.java
@@ -57,7 +57,7 @@ public Reader readerValue() {
}
@Override
- public TokenStream tokenStreamValue() {
+ public TokenStream tokenStream(Analyzer analyzer) throws IOException {
try {
allEntries.reset(); // reset the all entries, just in case it was read already
return AllTokenStream.allTokenStream(name, allEntries, analyzer);
|
b27c2ddcb46bed712c13cc5eb90b968652964d43
|
restlet-framework-java
|
Added support of the dataservices extension in- Android.--
|
a
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet.ext.xml/src/org/restlet/ext/xml/SaxRepresentation.java.android b/modules/org.restlet.ext.xml/src/org/restlet/ext/xml/SaxRepresentation.java.android
index 3f52fdd911..c19939f9b9 100644
--- a/modules/org.restlet.ext.xml/src/org/restlet/ext/xml/SaxRepresentation.java.android
+++ b/modules/org.restlet.ext.xml/src/org/restlet/ext/xml/SaxRepresentation.java.android
@@ -38,7 +38,6 @@ import javax.xml.parsers.SAXParserFactory;
import org.restlet.data.MediaType;
import org.restlet.representation.Representation;
-import org.w3c.dom.Document;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
@@ -101,6 +100,10 @@ public class SaxRepresentation extends XmlRepresentation {
this.source = new InputSource(xmlRepresentation.getStream());
}
+ if (xmlRepresentation.getCharacterSet()!= null) {
+ this.source.setEncoding(xmlRepresentation.getCharacterSet().getName());
+ }
+
if (xmlRepresentation.getIdentifier() != null) {
this.source.setSystemId(xmlRepresentation.getIdentifier()
.getTargetRef().toString());
diff --git a/modules/org.restlet.ext.xml/src/org/restlet/ext/xml/XmlRepresentation.java b/modules/org.restlet.ext.xml/src/org/restlet/ext/xml/XmlRepresentation.java
index a2a29f226a..da0cb51ac7 100644
--- a/modules/org.restlet.ext.xml/src/org/restlet/ext/xml/XmlRepresentation.java
+++ b/modules/org.restlet.ext.xml/src/org/restlet/ext/xml/XmlRepresentation.java
@@ -68,6 +68,62 @@ public abstract class XmlRepresentation extends OutputRepresentation
// [enddef]
{
+
+ // [ifdef android] method
+ /**
+ * Appends the text content of a given node and its descendants to the given
+ * buffer.
+ *
+ * @param node
+ * The node.
+ * @param sb
+ * The buffer.
+ */
+ private static void appendTextContent(Node node, StringBuilder sb) {
+ switch (node.getNodeType()) {
+ case Node.TEXT_NODE:
+ sb.append(node.getNodeValue());
+ break;
+ case Node.CDATA_SECTION_NODE:
+ sb.append(node.getNodeValue());
+ break;
+ case Node.COMMENT_NODE:
+ sb.append(node.getNodeValue());
+ break;
+ case Node.PROCESSING_INSTRUCTION_NODE:
+ sb.append(node.getNodeValue());
+ break;
+ case Node.ENTITY_REFERENCE_NODE:
+ if (node.getNodeName().startsWith("#")) {
+ int ch = Integer.parseInt(node.getNodeName().substring(1));
+ sb.append((char) ch);
+ }
+ break;
+ case Node.ELEMENT_NODE:
+ for (int i = 0; i < node.getChildNodes().getLength(); i++) {
+ appendTextContent(node.getChildNodes().item(i), sb);
+ }
+ break;
+ case Node.ATTRIBUTE_NODE:
+ for (int i = 0; i < node.getChildNodes().getLength(); i++) {
+ appendTextContent(node.getChildNodes().item(i), sb);
+ }
+ break;
+ case Node.ENTITY_NODE:
+ for (int i = 0; i < node.getChildNodes().getLength(); i++) {
+ appendTextContent(node.getChildNodes().item(i), sb);
+ }
+ break;
+ case Node.DOCUMENT_FRAGMENT_NODE:
+ for (int i = 0; i < node.getChildNodes().getLength(); i++) {
+ appendTextContent(node.getChildNodes().item(i), sb);
+ }
+ break;
+ default:
+ break;
+ }
+ }
+
// [ifndef android] method
/**
* Returns a SAX source.
@@ -141,6 +197,20 @@ private static String getSchemaLanguageUri(
return result;
}
+ // [ifdef android] method
+ /**
+ * Returns the text content of a given node and its descendants.
+ *
+ * @param node
+ * The node.
+ * @return The text content of a given node.
+ */
+ public static String getTextContent(Node node) {
+ StringBuilder sb = new StringBuilder();
+ appendTextContent(node, sb);
+ return sb.toString();
+ }
+
/**
* A SAX {@link EntityResolver} to use when resolving external entity
* references while parsing this type of XML representations.
@@ -578,6 +648,19 @@ public void setNamespaceAware(boolean namespaceAware) {
this.namespaceAware = namespaceAware;
}
+ // [ifndef android] method
+ /**
+ * Set a (compiled) {@link javax.xml.validation.Schema} to use when parsing
+ * and validating this type of XML representations.
+ *
+ * @param schema
+ * The (compiled) {@link javax.xml.validation.Schema} object to
+ * set.
+ */
+ public void setSchema(javax.xml.validation.Schema schema) {
+ this.schema = schema;
+ }
+
// [ifndef android] method
/**
* Set a schema representation to be compiled and used when parsing and
@@ -595,19 +678,6 @@ public void setSchema(Representation schemaRepresentation) {
}
}
- // [ifndef android] method
- /**
- * Set a (compiled) {@link javax.xml.validation.Schema} to use when parsing
- * and validating this type of XML representations.
- *
- * @param schema
- * The (compiled) {@link javax.xml.validation.Schema} object to
- * set.
- */
- public void setSchema(javax.xml.validation.Schema schema) {
- this.schema = schema;
- }
-
/**
* Indicates the desire for validating this type of XML representations
* against an XML schema if one is referenced within the contents.
@@ -634,50 +704,50 @@ public void setXIncludeAware(boolean includeAware) {
/**
* Validates the XML representation against a given schema.
*
- * @param schemaRepresentation
- * The XML schema representation to use.
+ * @param schema
+ * The XML schema to use.
*/
- public void validate(Representation schemaRepresentation) throws Exception {
- validate(schemaRepresentation, null);
+ public void validate(javax.xml.validation.Schema schema) throws Exception {
+ validate(schema, null);
}
// [ifndef android] method
/**
* Validates the XML representation against a given schema.
*
- * @param schemaRepresentation
- * The XML schema representation to use.
+ * @param schema
+ * The XML schema to use.
* @param result
* The Result object that receives (possibly augmented) XML.
*/
- public void validate(Representation schemaRepresentation,
+ public void validate(javax.xml.validation.Schema schema,
javax.xml.transform.Result result) throws Exception {
- validate(getSchema(schemaRepresentation), result);
+ schema.newValidator().validate(getSaxSource(), result);
}
// [ifndef android] method
/**
* Validates the XML representation against a given schema.
*
- * @param schema
- * The XML schema to use.
+ * @param schemaRepresentation
+ * The XML schema representation to use.
*/
- public void validate(javax.xml.validation.Schema schema) throws Exception {
- validate(schema, null);
+ public void validate(Representation schemaRepresentation) throws Exception {
+ validate(schemaRepresentation, null);
}
// [ifndef android] method
/**
* Validates the XML representation against a given schema.
*
- * @param schema
- * The XML schema to use.
+ * @param schemaRepresentation
+ * The XML schema representation to use.
* @param result
* The Result object that receives (possibly augmented) XML.
*/
- public void validate(javax.xml.validation.Schema schema,
+ public void validate(Representation schemaRepresentation,
javax.xml.transform.Result result) throws Exception {
- schema.newValidator().validate(getSaxSource(), result);
+ validate(getSchema(schemaRepresentation), result);
}
}
|
07d58c5537eb7c63194c49de7f75c79834a3f442
|
restlet-framework-java
|
Completed implementation of apispark extension.--
|
a
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet.ext.apispark/.classpath b/modules/org.restlet.ext.apispark/.classpath
index 3474bbacbd..3d20c6d659 100644
--- a/modules/org.restlet.ext.apispark/.classpath
+++ b/modules/org.restlet.ext.apispark/.classpath
@@ -3,5 +3,7 @@
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7"/>
<classpathentry kind="src" path="src"/>
<classpathentry combineaccessrules="false" exported="true" kind="src" path="/org.restlet"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/org.restlet.ext.jackson"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/com.fasterxml.jackson"/>
<classpathentry kind="output" path="bin"/>
</classpath>
diff --git a/modules/org.restlet.ext.apispark/module.xml b/modules/org.restlet.ext.apispark/module.xml
index ba038b1000..baaf978298 100644
--- a/modules/org.restlet.ext.apispark/module.xml
+++ b/modules/org.restlet.ext.apispark/module.xml
@@ -9,5 +9,6 @@
<dependencies>
<dependency type="module" id="core" />
+ <dependency type="module" id="jackson" />
</dependencies>
</module>
\ No newline at end of file
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Account.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Account.java
index ba445cdac4..5f4803a777 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Account.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Account.java
@@ -2,9 +2,8 @@
public class Account {
- public void createApi(Contract api){
-
- }
-
-
+ public void createApi(Contract api) {
+
+ }
+
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Body.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Body.java
index be8e207d46..d7db2bfb7f 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Body.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Body.java
@@ -3,27 +3,27 @@
public class Body {
/**
- * Reference of the representation in the body
- * of the message
- */
- private String type;
-
- /**
- * Indicates whether you should provide an array
- * of [type] or just one [type]
+ * Indicates whether you should provide an array of [type] or just one
+ * [type].
*/
private boolean array;
-
+
+ /** Reference of the representation in the body of the message. */
+ private String type;
+
public String getRepresentation() {
return type;
}
- public void setRepresentation(String representation) {
- this.type = representation;
- }
+
public boolean isArray() {
return array;
}
+
public void setArray(boolean array) {
this.array = array;
}
+
+ public void setRepresentation(String representation) {
+ this.type = representation;
+ }
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Contract.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Contract.java
index b9ad9209eb..eacc795a92 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Contract.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Contract.java
@@ -3,57 +3,50 @@
import java.util.List;
public class Contract {
-
- /**
- * Name of the API
- */
- private String name;
-
- /**
- * Textual description of the API
- */
+
+ /** Textual description of the API. */
private String description;
-
+
+ /** Name of the API. */
+ private String name;
+
/**
- * Representations available with this API
- * Note: their "name" is used as a reference further in
- * this description
+ * Representations available with this API Note: their "name" is used as a
+ * reference further in this description.
*/
private List<Representation> Representations;
-
- /**
- * Resources provided by the API
- */
+
+ /** Resources provided by the API. */
private List<Resource> resources;
+ public String getDescription() {
+ return description;
+ }
+
public String getName() {
return name;
}
- public void setName(String name) {
- this.name = name;
+ public List<Representation> getRepresentations() {
+ return Representations;
}
- public String getDescription() {
- return description;
+ public List<Resource> getResources() {
+ return resources;
}
public void setDescription(String description) {
this.description = description;
}
- public List<Representation> getRepresentations() {
- return Representations;
+ public void setName(String name) {
+ this.name = name;
}
public void setRepresentations(List<Representation> representations) {
Representations = representations;
}
- public List<Resource> getResources() {
- return resources;
- }
-
public void setResources(List<Resource> resources) {
this.resources = resources;
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Documentation.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Documentation.java
index 7ce075cafb..310e43c177 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Documentation.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Documentation.java
@@ -2,71 +2,62 @@
public class Documentation {
- /**
- * Current version of the API
- */
- private String version;
-
- /**
- * URL of the description of the license used by the API
- */
- private String license;
-
- /**
- * Base URL on which you can access the API
- * Note: will enable multiple endpoints and protocols in
- * the future (use class Endpoint in a list)
- */
- private String endpoint;
-
- /**
- * E-mail of the person to contact for further information
- * or user acces on the API
- */
+ /** Any useful information for a user that plans to access to the API. */
private String contact;
-
+
+ /** Contract of this API. */
+ private Contract contract;
+
/**
- * Contract of this API
+ * Base URL on which you can access the API<br>
+ * Note: will enable multiple endpoints and protocols in the future (use
+ * class Endpoint in a list).
*/
- private Contract contract;
+ private String endpoint;
- public String getVersion() {
- return version;
- }
+ /** URL of the description of the license used by the API. */
+ private String license;
- public void setVersion(String version) {
- this.version = version;
- }
+ /** Current version of the API. */
+ private String version;
- public String getLicense() {
- return license;
+ public String getContact() {
+ return contact;
}
- public void setLicense(String license) {
- this.license = license;
+ public Contract getContract() {
+ return contract;
}
public String getEndpoint() {
return endpoint;
}
- public void setEndpoint(String endpoint) {
- this.endpoint = endpoint;
+ public String getLicense() {
+ return license;
}
- public String getContact() {
- return contact;
+ public String getVersion() {
+ return version;
}
public void setContact(String contact) {
this.contact = contact;
}
- public Contract getContract() {
- return contract;
- }
-
public void setContract(Contract contract) {
this.contract = contract;
}
+
+ public void setEndpoint(String endpoint) {
+ this.endpoint = endpoint;
+ }
+
+ public void setLicense(String license) {
+ this.license = license;
+ }
+
+ public void setVersion(String version) {
+ this.version = version;
+ }
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Endpoint.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Endpoint.java
index 556b567ae6..b84d182c93 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Endpoint.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Endpoint.java
@@ -4,37 +4,36 @@
public class Endpoint {
- /**
- * Protocol used for this endpoint
- */
- private Protocol protocol;
-
- /**
- * Address of the host
- */
+ /** The host's name. */
private String host;
-
- /**
- * Port used for this endpoint
- */
- private Integer port;
-
- public Protocol getProtocol() {
- return protocol;
- }
- public void setProtocol(Protocol protocol) {
- this.protocol = protocol;
- }
+
+ /** The endpoint's port. */
+ private int port;
+
+ /** Protocol used for this endpoint. */
+ private Protocol protocol;
+
public String getHost() {
return host;
}
+
+ public int getPort() {
+ return port;
+ }
+
+ public Protocol getProtocol() {
+ return protocol;
+ }
+
public void setHost(String host) {
this.host = host;
}
- public Integer getPort() {
- return port;
- }
- public void setPort(Integer port) {
+
+ public void setPort(int port) {
this.port = port;
}
+
+ public void setProtocol(Protocol protocol) {
+ this.protocol = protocol;
+ }
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Method.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Method.java
index 27e9ad492a..431d26def3 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Method.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Method.java
@@ -1,5 +1,26 @@
package org.restlet.ext.apispark;
public class Method {
+ /** Textual description of this method. */
+ private String description;
+
+ /** Name of this method. */
+ private String name;
+
+ public String getDescription() {
+ return description;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Operation.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Operation.java
index 31863aa858..94ef0649dd 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Operation.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Operation.java
@@ -4,105 +4,107 @@
public class Operation {
- /**
- * HTTP method for this operation
- */
- private Method method;
-
- /**
- * Textual description of this operation
- */
+ /** Textual description of this operation. */
private String description;
-
+
+ /** Headers to use for this operation. */
+ private List<Parameter> headers;
+
+ /** Representation retrieved by this operation if any. */
+ private Body inRepresentation;
+
+ /** HTTP method for this operation. */
+ private Method method;
+
/**
- * Unique name for this operation
- * Note: will be used for client SDK generation in
- * the future
+ * Unique name for this operation<br>
+ * Note: will be used for client SDK generation in the future.
*/
private String name;
-
- /**
- * Representation retrieved by this operation if any
- */
- private Body inRepresentation;
-
+
/**
- * Representation to send in the body of your request
- * for this operation if any
+ * Representation to send in the body of your request for this operation if
+ * any.
*/
private Body outRepresentation;
-
- /**
- * Query parameters available for this operation
- */
- private List<Parameter> queryParameters;
-
- /**
- * Headers to use for this operation
- */
- private List<Parameter> headers;
-
- /**
- * Path variables you must provide for this operation
- */
+
+ /** ath variables you must provide for this operation. */
private List<PathVariable> pathVariables;
-
- /**
- * Possible response messages you could encounter
- */
+
+ /** Query parameters available for this operation. */
+ private List<Parameter> queryParameters;
+
+ /** Possible response messages you could encounter. */
private List<Response> responses;
-
- public Method getMethod() {
- return method;
- }
- public void setMethod(Method method) {
- this.method = method;
- }
+
public String getDescription() {
return description;
}
- public void setDescription(String description) {
- this.description = description;
- }
- public String getName() {
- return name;
- }
- public void setName(String name) {
- this.name = name;
+
+ public List<Parameter> getHeaders() {
+ return headers;
}
+
public Body getInRepresentation() {
return inRepresentation;
}
- public void setInRepresentation(Body inRepresentation) {
- this.inRepresentation = inRepresentation;
+
+ public Method getMethod() {
+ return method;
}
+
+ public String getName() {
+ return name;
+ }
+
public Body getOutRepresentation() {
return outRepresentation;
}
- public void setOutRepresentation(Body outRepresentation) {
- this.outRepresentation = outRepresentation;
+
+ public List<PathVariable> getPathVariables() {
+ return pathVariables;
}
+
public List<Parameter> getQueryParameters() {
return queryParameters;
}
- public void setQueryParameters(List<Parameter> queryParameters) {
- this.queryParameters = queryParameters;
+
+ public List<Response> getResponses() {
+ return responses;
}
- public List<Parameter> getHeaders() {
- return headers;
+
+ public void setDescription(String description) {
+ this.description = description;
}
+
public void setHeaders(List<Parameter> headers) {
this.headers = headers;
}
- public List<PathVariable> getPathVariables() {
- return pathVariables;
+
+ public void setInRepresentation(Body inRepresentation) {
+ this.inRepresentation = inRepresentation;
}
+
+ public void setMethod(Method method) {
+ this.method = method;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public void setOutRepresentation(Body outRepresentation) {
+ this.outRepresentation = outRepresentation;
+ }
+
public void setPathVariables(List<PathVariable> pathVariables) {
this.pathVariables = pathVariables;
}
- public List<Response> getResponses() {
- return responses;
+
+ public void setQueryParameters(List<Parameter> queryParameters) {
+ this.queryParameters = queryParameters;
}
+
public void setResponses(List<Response> responses) {
this.responses = responses;
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Parameter.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Parameter.java
index 0c13d91ad4..5cffaadc06 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Parameter.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Parameter.java
@@ -5,71 +5,74 @@
public class Parameter {
/**
- * Name of the parameter
- */
- private String name;
-
- /**
- * Textual description of this parameter
- */
- private String description;
-
- /**
- * Default value of the parameter
+ * Indicates whether you can provide multiple values for this parameter or
+ * not.
*/
+ private boolean allowMultiple;
+
+ /** Default value of the parameter. */
private String defaultValue;
-
+
+ /** Textual description of this parameter. */
+ private String description;
+
+ /** Name of the parameter. */
+ private String name;
+
/**
- * List of possible values of the parameter if there
- * is a limited number of possible values for it
+ * List of possible values of the parameter if there is a limited number of
+ * possible values for it.
*/
private List<String> possibleValues;
-
- /**
- * Indicates whether the parameter is mandatory or not
- */
+
+ /** Indicates whether the parameter is mandatory or not. */
private boolean required;
-
- /**
- * Indicates whether you can provide multiple values
- * for this parameter or not
- */
- private boolean allowMultiple;
-
+
+ public String getDefaultValue() {
+ return defaultValue;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
public String getName() {
return name;
}
- public void setName(String name) {
- this.name = name;
+
+ public List<String> getPossibleValues() {
+ return possibleValues;
}
- public String getDescription() {
- return description;
+
+ public boolean isAllowMultiple() {
+ return allowMultiple;
}
- public void setDescription(String description) {
- this.description = description;
+
+ public boolean isRequired() {
+ return required;
}
- public String getDefaultValue() {
- return defaultValue;
+
+ public void setAllowMultiple(boolean allowMultiple) {
+ this.allowMultiple = allowMultiple;
}
+
public void setDefaultValue(String defaultValue) {
this.defaultValue = defaultValue;
}
- public List<String> getPossibleValues() {
- return possibleValues;
+
+ public void setDescription(String description) {
+ this.description = description;
}
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
public void setPossibleValues(List<String> possibleValues) {
this.possibleValues = possibleValues;
}
- public boolean isRequired() {
- return required;
- }
+
public void setRequired(boolean required) {
this.required = required;
}
- public boolean isAllowMultiple() {
- return allowMultiple;
- }
- public void setAllowMultiple(boolean allowMultiple) {
- this.allowMultiple = allowMultiple;
- }
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/PathVariable.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/PathVariable.java
index c472f18486..12b38dc784 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/PathVariable.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/PathVariable.java
@@ -2,36 +2,21 @@
public class PathVariable {
- /**
- * Name of this variable
- */
- private String name;
-
- /**
- * Textual description of this variable
- */
- private String description;
-
- /**
- * Indicates whether you can provide a list of values
- * or just a single one
- */
+ /** Indicates whether you can provide a list of values or just a single one. */
private boolean array;
- public String getName() {
- return name;
- }
+ /** Textual description of this variable. */
+ private String description;
- public void setName(String name) {
- this.name = name;
- }
+ /** Name of this variable. */
+ private String name;
public String getDescription() {
return description;
}
- public void setDescription(String description) {
- this.description = description;
+ public String getName() {
+ return name;
}
public boolean isArray() {
@@ -41,4 +26,12 @@ public boolean isArray() {
public void setArray(boolean array) {
this.array = array;
}
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Property.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Property.java
index 888a64cd69..26da507f76 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Property.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Property.java
@@ -5,122 +5,135 @@
public class Property {
/**
- * Name ot this property
- */
- private String name;
-
- /**
- * Textual description of this property
- */
- private String description;
-
- /**
- * Type of this property, either a primitive type or
- * a reference to a representation
+ * Type of this property, either a primitive type or a reference to a
+ * representation.
*/
private String dataType;
-
+
+ // TODO review comment
/**
- * Default value if this property is of a primitive type
- * Note: need to check casts for non-String primitive
- * types
+ * Default value if this property is of a primitive type<br>
+ * Note: need to check casts for non-String primitive types
*/
private String defaultValue;
-
+
+ /** Textual description of this property. */
+ private String description;
+
+ // TODO review comment
/**
- * A list of possible values for this property if it has
- * a limited number of possible values
+ * Maximum value of this property if it is a number Note: check casts
*/
- private List<String> possibleValues;
-
+ private String max;
+
+ // TODO review comment
+ /** Maximum number of occurences of the items of this property. */
+ private Integer maxOccurs;
+
+ // TODO review comment
/**
- * Minimum value of this property if it is a number
- * Note: check casts
+ * Minimum value of this property if it is a number Note: check casts
*/
private String min;
-
+
+ // TODO review comment
+ /** Minimum number of occurences of the items of this property. */
+ private Integer minOccurs;
+
+ /** Name of this property. */
+ private String name;
+
+ // TODO review comment
/**
- * Maximum value of this property if it is a number
- * Note: check casts
+ * A list of possible values for this property if it has a limited number of
+ * possible values.
*/
- private String max;
-
+ private List<String> possibleValues;
+
+ // TODO review comment
/**
- * If maxOccurs > 1, indicates whether each item in
- * this property is supposed to be unique or not
+ * If maxOccurs > 1, indicates whether each item in this property is
+ * supposed to be unique or not
*/
private boolean uniqueItems;
-
- /**
- * Minimum number of occurences of the items of this
- * property
- */
- private Integer minOccurs;
-
- /**
- * Maximum number of occurences of the items of this
- * property
- */
- private Integer maxOccurs;
-
- public String getName() {
- return name;
- }
- public void setName(String name) {
- this.name = name;
+
+ public String getDefaultValue() {
+ return defaultValue;
}
+
public String getDescription() {
return description;
}
- public void setDescription(String description) {
- this.description = description;
+
+ public String getMax() {
+ return max;
}
- public String getType() {
- return dataType;
+
+ public Integer getMaxOccurs() {
+ return maxOccurs;
}
- public void setType(String type) {
- this.dataType = type;
+
+ public String getMin() {
+ return min;
}
- public String getDefaultValue() {
- return defaultValue;
+
+ public Integer getMinOccurs() {
+ return minOccurs;
}
- public void setDefaultValue(String defaultValue) {
- this.defaultValue = defaultValue;
+
+ public String getName() {
+ return name;
}
+
public List<String> getPossibleValues() {
return possibleValues;
}
- public void setPossibleValues(List<String> possibleValues) {
- this.possibleValues = possibleValues;
+
+ public String getType() {
+ return dataType;
}
- public String getMin() {
- return min;
+
+ public boolean isUniqueItems() {
+ return uniqueItems;
}
- public void setMin(String min) {
- this.min = min;
+
+ public void setDefaultValue(String defaultValue) {
+ this.defaultValue = defaultValue;
}
- public String getMax() {
- return max;
+
+ public void setDescription(String description) {
+ this.description = description;
}
+
public void setMax(String max) {
this.max = max;
}
- public boolean isUniqueItems() {
- return uniqueItems;
- }
- public void setUniqueItems(boolean uniqueItems) {
- this.uniqueItems = uniqueItems;
+
+ public void setMaxOccurs(Integer maxOccurs) {
+ this.maxOccurs = maxOccurs;
}
- public Integer getMinOccurs() {
- return minOccurs;
+
+ public void setMin(String min) {
+ this.min = min;
}
+
public void setMinOccurs(Integer minOccurs) {
this.minOccurs = minOccurs;
}
- public Integer getMaxOccurs() {
- return maxOccurs;
+
+ public void setName(String name) {
+ this.name = name;
}
- public void setMaxOccurs(Integer maxOccurs) {
- this.maxOccurs = maxOccurs;
+
+ public void setPossibleValues(List<String> possibleValues) {
+ this.possibleValues = possibleValues;
+ }
+
+ public void setType(String type) {
+ this.dataType = type;
+ }
+
+ public void setUniqueItems(boolean uniqueItems) {
+ this.uniqueItems = uniqueItems;
}
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Representation.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Representation.java
index 4524407036..a2d4d780cd 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Representation.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Representation.java
@@ -4,59 +4,58 @@
public class Representation {
- /**
- * Name of the representation
- */
- private String name;
-
- /**
- * Textual description of this representation
- */
+ /** Textual description of this representation. */
private String description;
-
- /**
- * Reference to its parent type if any
- */
+
+ /** Name of the representation. */
+ private String name;
+
+ /** Reference to its parent type if any. */
private String parentType;
-
- /**
- * List of variants available for this representation
- */
- private List<Variant> variants;
-
- /**
- * List of this representation's properties
- */
+
+ /** List of this representation's properties. */
private List<Property> properties;
-
- public String getName() {
- return name;
- }
- public void setName(String name) {
- this.name = name;
- }
+
+ /** List of variants available for this representation. */
+ private List<Variant> variants;
+
public String getDescription() {
return description;
}
- public void setDescription(String description) {
- this.description = description;
+
+ public String getName() {
+ return name;
}
+
public String getParentType() {
return parentType;
}
- public void setParentType(String parentType) {
- this.parentType = parentType;
+
+ public List<Property> getProperties() {
+ return properties;
}
+
public List<Variant> getVariants() {
return variants;
}
- public void setVariants(List<Variant> variants) {
- this.variants = variants;
+
+ public void setDescription(String description) {
+ this.description = description;
}
- public List<Property> getProperties() {
- return properties;
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public void setParentType(String parentType) {
+ this.parentType = parentType;
}
+
public void setProperties(List<Property> properties) {
this.properties = properties;
}
+
+ public void setVariants(List<Variant> variants) {
+ this.variants = variants;
+ }
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Resource.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Resource.java
index 36f64d2597..a65bc9354e 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Resource.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Resource.java
@@ -4,55 +4,47 @@
public class Resource {
- /**
- * Name of this resource
- */
+ /** Textual description of this resource */
+ private String description;
+
+ /** Name of this resource */
private String name;
-
- /**
- * Relative path from the endpoint to this resource
- */
- private String resourcePath;
-
- /**
- * List of the APIs this resource provides
- */
+
+ /** List of the APIs this resource provides */
private List<Operation> operations;
-
- /**
- * Textual description of this resource
- */
- private String description;
+
+ /** Relative path from the endpoint to this resource */
+ private String resourcePath;
+
+ public String getDescription() {
+ return description;
+ }
public String getName() {
return name;
}
- public void setName(String name) {
- this.name = name;
+ public List<Operation> getOperations() {
+ return operations;
}
public String getResourcePath() {
return resourcePath;
}
- public void setResourcePath(String resourcePath) {
- this.resourcePath = resourcePath;
+ public void setDescription(String description) {
+ this.description = description;
}
- public List<Operation> getOperations() {
- return operations;
+ public void setName(String name) {
+ this.name = name;
}
public void setOperations(List<Operation> operations) {
this.operations = operations;
}
- public String getDescription() {
- return description;
- }
-
- public void setDescription(String description) {
- this.description = description;
+ public void setResourcePath(String resourcePath) {
+ this.resourcePath = resourcePath;
}
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Response.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Response.java
index 20d2a3ce97..e4b50812c2 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Response.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Response.java
@@ -1,71 +1,69 @@
package org.restlet.ext.apispark;
+import org.restlet.data.Status;
+
public class Response {
- /**
- * Name of this response
- */
- private String name;
-
- /**
- * Textual description of this response
- */
+ /** Custom content of the body if any. */
+ private Body body;
+
+ /** Status code of the response */
+ private int code;
+
+ /** Textual description of this response */
private String description;
-
- /**
- * HTTP code for the response
- * See: http://fr.wikipedia.org/wiki/Liste_des_codes_HTTP
- */
- private Integer code;
-
- /**
- * Textual message associated with code in RCF
- * See: http://fr.wikipedia.org/wiki/Liste_des_codes_HTTP
- */
+
+ /** Status message of the response. */
private String message;
-
+
+ /** Name of this response */
+ private String name;
+
/**
- * Custom content of the body if any
+ * Constructor. The default status code is {@link Status#SUCCESS_OK}.
*/
- private Body body;
+ public Response() {
+ setCode(Status.SUCCESS_OK.getCode());
+ setMessage(Status.SUCCESS_OK.getDescription());
+ }
- public String getName() {
- return name;
+ public Body getBody() {
+ return body;
}
- public void setName(String name) {
- this.name = name;
+ public int getCode() {
+ return code;
}
public String getDescription() {
return description;
}
- public void setDescription(String description) {
- this.description = description;
+ public String getMessage() {
+ return message;
}
- public Integer getCode() {
- return code;
+ public String getName() {
+ return name;
+ }
+
+ public void setBody(Body body) {
+ this.body = body;
}
- public void setCode(Integer code) {
+ public void setCode(int code) {
this.code = code;
}
- public String getMessage() {
- return message;
+ public void setDescription(String description) {
+ this.description = description;
}
public void setMessage(String message) {
this.message = message;
}
- public Body getBody() {
- return body;
- }
-
- public void setBody(Body body) {
- this.body = body;
+ public void setName(String name) {
+ this.name = name;
}
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Variant.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Variant.java
index 772478d232..a302c63414 100644
--- a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Variant.java
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/Variant.java
@@ -2,29 +2,25 @@
public class Variant {
- /**
- * Textual description of this variant
- */
- private String description;
-
- /**
- * Must be a MIME type
- */
+ /** Must be a MIME type. */
private String dataType;
- public String getDescription() {
- return description;
- }
-
- public void setDescription(String description) {
- this.description = description;
- }
+ /** Textual description of this variant. */
+ private String description;
public String getDataType() {
return dataType;
}
+ public String getDescription() {
+ return description;
+ }
+
public void setDataType(String dataType) {
this.dataType = dataType;
}
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkApplication.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkApplication.java
new file mode 100644
index 0000000000..6d6a1766f6
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkApplication.java
@@ -0,0 +1,855 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+
+import org.restlet.Application;
+import org.restlet.Component;
+import org.restlet.Context;
+import org.restlet.Request;
+import org.restlet.Response;
+import org.restlet.Restlet;
+import org.restlet.Server;
+import org.restlet.data.MediaType;
+import org.restlet.data.Method;
+import org.restlet.data.Protocol;
+import org.restlet.data.Reference;
+import org.restlet.data.Status;
+import org.restlet.engine.Engine;
+import org.restlet.representation.Representation;
+import org.restlet.representation.Variant;
+import org.restlet.resource.Directory;
+import org.restlet.resource.Finder;
+import org.restlet.resource.ServerResource;
+import org.restlet.routing.Filter;
+import org.restlet.routing.Route;
+import org.restlet.routing.Router;
+import org.restlet.routing.TemplateRoute;
+import org.restlet.routing.VirtualHost;
+
+/**
+ * APISpark enabled application. This {@link Application} subclass can describe
+ * itself in APISpark by introspecting its content. You can obtain this
+ * representation with an OPTIONS request addressed exactly to the application
+ * URI (e.g. "http://host:port/path/to/application"). By default, the returned
+ * representation gleans the list of all attached {@link ServerResource} classes
+ * and calls {@link #getName()} to get the title and {@link #getDescription()}
+ * the textual content of the APISpark document generated. This default behavior
+ * can be customized by overriding the
+ * {@link #getApplicationInfo(Request, Response)} method.<br>
+ * <br>
+ * In case you want to customize the XSLT stylesheet, you can override the
+ * {@link #createAPISparkRepresentation(ApplicationInfo)} method and return an
+ * instance of an {@link ApisparkRepresentation} subclass overriding the
+ * {@link ApisparkRepresentation#getHtmlRepresentation()} method.<br>
+ * <br>
+ * In addition, this class can create an instance and configure it with an
+ * user-provided APISpark/XML document. In this case, it creates a root
+ * {@link Router} and for each resource found in the APISpark document, it tries
+ * to attach a {@link ServerResource} class to the router using its APISpark
+ * path. For this, it looks up the qualified name of the {@link ServerResource}
+ * subclass using the APISpark's "id" attribute of the "resource" elements. This
+ * is the only Restlet specific convention on the original APISpark document.<br>
+ * <br>
+ * To attach an application configured in this way to an existing component, you
+ * can call the {@link #attachToComponent(Component)} or the
+ * {@link #attachToHost(VirtualHost)} methods. In this case, it uses the "base"
+ * attribute of the APISpark "resources" element as the URI attachment path to
+ * the virtual host.<br>
+ * <br>
+ * Concurrency note: instances of this class or its subclasses can be invoked by
+ * several threads at the same time and therefore must be thread-safe. You
+ * should be especially careful when storing state in member variables. <br>
+ *
+ * @author Jerome Louvel
+ */
+public class ApisparkApplication extends Application {
+
+ /**
+ * Indicates if the application should be automatically described via
+ * APISpark when an OPTIONS request handles a "*" target URI.
+ */
+ private volatile boolean autoDescribing;
+
+ /** The APISpark base reference. */
+ private volatile Reference baseRef;
+
+ /** The router to {@link ServerResource} classes. */
+ private volatile Router router;
+
+ /**
+ * Creates an application that can automatically introspect and expose
+ * itself as with a APISpark description upon reception of an OPTIONS
+ * request on the "*" target URI.
+ */
+ public ApisparkApplication() {
+ this((Context) null);
+ }
+
+ /**
+ * Creates an application that can automatically introspect and expose
+ * itself as with a APISpark description upon reception of an OPTIONS
+ * request on the "*" target URI.
+ *
+ * @param context
+ * The context to use based on parent component context. This
+ * context should be created using the
+ * {@link Context#createChildContext()} method to ensure a proper
+ * isolation with the other applications.
+ */
+ public ApisparkApplication(Context context) {
+ super(context);
+ this.autoDescribing = true;
+ }
+
+ /**
+ * Creates an application described using a APISpark document. Creates a
+ * router where Resource classes are attached and set it as the root
+ * Restlet.
+ *
+ * By default the application is not automatically described. If you want
+ * to, you can call {@link #setAutoDescribing(boolean)}.
+ *
+ * @param context
+ * The context to use based on parent component context. This
+ * context should be created using the
+ * {@link Context#createChildContext()} method to ensure a proper
+ * isolation with the other applications.
+ * @param apispark
+ * The APISpark description document.
+ */
+ public ApisparkApplication(Context context, Representation apispark) {
+ super(context);
+ this.autoDescribing = false;
+
+ try {
+ // Instantiates a APISparkRepresentation of the APISpark document
+ ApisparkRepresentation apisparkRep = null;
+
+ if (apispark instanceof ApisparkRepresentation) {
+ apisparkRep = (ApisparkRepresentation) apispark;
+ } else {
+ // TODO to be done
+ // apisparkRep = new APISparkRepresentation(apispark);
+ }
+
+ final Router root = new Router(getContext());
+ this.router = root;
+ setInboundRoot(root);
+
+ if (apisparkRep.getApplication() != null) {
+ if (apisparkRep.getApplication().getResources() != null) {
+ for (final ResourceInfo resource : apisparkRep
+ .getApplication().getResources().getResources()) {
+ attachResource(resource, null, this.router);
+ }
+
+ // Analyzes the APISpark resources base
+ setBaseRef(apisparkRep.getApplication().getResources()
+ .getBaseRef());
+ }
+
+ // Set the name of the application as the title of the first
+ // documentation tag.
+ if (!apisparkRep.getApplication().getDocumentations().isEmpty()) {
+ setName(apisparkRep.getApplication().getDocumentations()
+ .get(0).getTitle());
+ }
+ }
+ } catch (Exception e) {
+ getLogger().log(Level.WARNING,
+ "Error during the attachment of the APISpark application",
+ e);
+ }
+ }
+
+ /**
+ * Creates an application described using a APISpark document. Creates a
+ * router where Resource classes are attached and set it as the root
+ * Restlet.
+ *
+ * By default the application is not automatically described. If you want
+ * to, you can call {@link #setAutoDescribing(boolean)}.
+ *
+ * @param apispark
+ * The APISpark description document.
+ */
+ public ApisparkApplication(Representation apispark) {
+ this(null, apispark);
+ }
+
+ /**
+ * Adds the necessary server connectors to the component.
+ *
+ * @param component
+ * The parent component to update.
+ */
+ private void addConnectors(Component component) {
+ // Create the server connector
+ Protocol protocol = getBaseRef().getSchemeProtocol();
+ int port = getBaseRef().getHostPort();
+ boolean exists = false;
+
+ if (port == -1) {
+ for (Server server : component.getServers()) {
+ if (server.getProtocols().contains(protocol)
+ && (server.getPort() == protocol.getDefaultPort())) {
+ exists = true;
+ }
+ }
+
+ if (!exists) {
+ component.getServers().add(protocol);
+ }
+ } else {
+ for (Server server : component.getServers()) {
+ if (server.getProtocols().contains(protocol)
+ && (server.getPort() == port)) {
+ exists = true;
+ }
+ }
+
+ if (!exists) {
+ component.getServers().add(protocol, port);
+ }
+ }
+ }
+
+ /**
+ * Represents the resource as a APISpark description.
+ *
+ * @param request
+ * The current request.
+ * @param response
+ * The current response.
+ * @return The APISpark description.
+ */
+ protected Representation apisparkRepresent(Request request,
+ Response response) {
+ return apisparkRepresent(getPreferredAPISparkVariant(request), request,
+ response);
+ }
+
+ /**
+ * Represents the resource as a APISpark description for the given variant.
+ *
+ * @param variant
+ * The APISpark variant.
+ * @param request
+ * The current request.
+ * @param response
+ * The current response.
+ * @return The APISpark description.
+ */
+ protected Representation apisparkRepresent(Variant variant,
+ Request request, Response response) {
+ Representation result = null;
+
+ if (variant != null) {
+ ApplicationInfo applicationInfo = getApplicationInfo(request,
+ response);
+ DocumentationInfo doc = null;
+
+ if ((getName() != null) && !"".equals(getName())) {
+ if (applicationInfo.getDocumentations().isEmpty()) {
+ doc = new DocumentationInfo();
+ applicationInfo.getDocumentations().add(doc);
+ } else {
+ doc = applicationInfo.getDocumentations().get(0);
+ }
+
+ doc.setTitle(getName());
+ }
+
+ if ((doc != null) && (getDescription() != null)
+ && !"".equals(getDescription())) {
+ doc.setTextContent(getDescription());
+ }
+
+ if (MediaType.APPLICATION_JSON.equals(variant.getMediaType())) {
+ result = createAPISparkRepresentation(applicationInfo);
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Attaches a resource, as specified in a APISpark document, to a specified
+ * router, then recursively attaches its child resources.
+ *
+ * @param currentResource
+ * The resource to attach.
+ * @param parentResource
+ * The parent resource. Needed to correctly resolve the "path" of
+ * the resource. Should be null if the resource is root-level.
+ * @param router
+ * The router to which to attach the resource and its children.
+ * @throws ClassNotFoundException
+ * If the class name specified in the "id" attribute of the
+ * resource does not exist, this exception will be thrown.
+ */
+ private void attachResource(ResourceInfo currentResource,
+ ResourceInfo parentResource, Router router)
+ throws ClassNotFoundException {
+
+ String uriPattern = currentResource.getPath();
+
+ // If there is a parentResource, add its uriPattern to this one
+ if (parentResource != null) {
+ String parentUriPattern = parentResource.getPath();
+
+ if ((parentUriPattern.endsWith("/") == false)
+ && (uriPattern.startsWith("/") == false)) {
+ parentUriPattern += "/";
+ }
+
+ uriPattern = parentUriPattern + uriPattern;
+ currentResource.setPath(uriPattern);
+ } else if (!uriPattern.startsWith("/")) {
+ uriPattern = "/" + uriPattern;
+ currentResource.setPath(uriPattern);
+ }
+
+ Finder finder = createFinder(router, uriPattern, currentResource);
+
+ if (finder != null) {
+ // Attach the resource itself
+ router.attach(uriPattern, finder);
+ }
+
+ // Attach children of the resource
+ for (ResourceInfo childResource : currentResource.getChildResources()) {
+ attachResource(childResource, currentResource, router);
+ }
+ }
+
+ /**
+ * Attaches the application to the given component if the application has a
+ * APISpark base reference. The application will be attached to an existing
+ * virtual host if possible, otherwise a new one will be created.
+ *
+ * @param component
+ * The parent component to update.
+ * @return The parent virtual host.
+ */
+ public VirtualHost attachToComponent(Component component) {
+ VirtualHost result = null;
+
+ if (getBaseRef() != null) {
+ // Create the virtual host
+ result = getVirtualHost(component);
+
+ // Attach the application to the virtual host
+ attachToHost(result);
+
+ // Adds the necessary server connectors
+ addConnectors(component);
+ } else {
+ getLogger()
+ .warning(
+ "The APISpark application has no base reference defined. Unable to guess the virtual host.");
+ }
+
+ return result;
+ }
+
+ /**
+ * Attaches the application to the given host using the APISpark base
+ * reference.
+ *
+ * @param host
+ * The virtual host to attach to.
+ */
+ public void attachToHost(VirtualHost host) {
+ if (getBaseRef() != null) {
+ final String path = getBaseRef().getPath();
+ if (path == null) {
+ host.attach("", this);
+ } else {
+ host.attach(path, this);
+ }
+
+ } else {
+ getLogger()
+ .warning(
+ "The APISpark application has no base reference defined. Unable to guess the virtual host.");
+ }
+ }
+
+ /**
+ * Indicates if the application and all its resources can be described using
+ * APISpark.
+ *
+ * @param remainingPart
+ * The URI remaining part.
+ * @param request
+ * The request to handle.
+ * @param response
+ * The response to update.
+ */
+ protected boolean canDescribe(String remainingPart, Request request,
+ Response response) {
+ return isAutoDescribing()
+ && Method.OPTIONS.equals(request.getMethod())
+ && (response.getStatus().isClientError() || !response
+ .isEntityAvailable())
+ && ("/".equals(remainingPart) || "".equals(remainingPart));
+ }
+
+ /**
+ * Creates a new APISpark representation for a given {@link ApplicationInfo}
+ * instance describing an application.
+ *
+ * @param applicationInfo
+ * The application description.
+ * @return The created {@link ApisparkRepresentation}.
+ */
+ protected Representation createAPISparkRepresentation(
+ ApplicationInfo applicationInfo) {
+ return new ApisparkRepresentation(applicationInfo);
+ }
+
+ /**
+ * Creates a finder for the given resource info. By default, it looks up for
+ * an "id" attribute containing a fully qualified class name.
+ *
+ * @param router
+ * The parent router.
+ * @param resourceInfo
+ * The APISpark resource descriptor.
+ * @return The created finder.
+ * @throws ClassNotFoundException
+ */
+ @SuppressWarnings("unchecked")
+ protected Finder createFinder(Router router, String uriPattern,
+ ResourceInfo resourceInfo) throws ClassNotFoundException {
+ Finder result = null;
+
+ if (resourceInfo.getIdentifier() != null) {
+ // The "id" attribute conveys the target class name
+ Class<? extends ServerResource> targetClass = (Class<? extends ServerResource>) Engine
+ .loadClass(resourceInfo.getIdentifier());
+ result = router.createFinder(targetClass);
+ } else {
+ getLogger()
+ .fine("Unable to find the 'id' attribute of the resource element with this path attribute \""
+ + uriPattern + "\"");
+ }
+
+ return result;
+ }
+
+ /**
+ * Returns the available APISpark variants.
+ *
+ * @return The available APISpark variants.
+ */
+ protected List<Variant> getAPISparkVariants() {
+ final List<Variant> result = new ArrayList<Variant>();
+ result.add(new Variant(MediaType.APPLICATION_JSON));
+ result.add(new Variant(MediaType.APPLICATION_XML));
+ result.add(new Variant(MediaType.TEXT_XML));
+ return result;
+ }
+
+ /**
+ * Returns a APISpark description of the current application. By default,
+ * this method discovers all the resources attached to this application. It
+ * can be overridden to add documentation, list of representations, etc.
+ *
+ * @param request
+ * The current request.
+ * @param response
+ * The current response.
+ * @return An application description.
+ */
+ protected ApplicationInfo getApplicationInfo(Request request,
+ Response response) {
+ ApplicationInfo applicationInfo = new ApplicationInfo();
+ applicationInfo.getResources().setBaseRef(
+ request.getResourceRef().getBaseRef());
+ applicationInfo.getResources().setResources(
+ getResourceInfos(applicationInfo,
+ getNextRouter(getInboundRoot()), request, response));
+ return applicationInfo;
+ }
+
+ /**
+ * Returns the APISpark base reference.
+ *
+ * @return The APISpark base reference.
+ */
+ public Reference getBaseRef() {
+ return this.baseRef;
+ }
+
+ /**
+ * Returns the next router available.
+ *
+ * @param current
+ * The current Restlet to inspect.
+ * @return The first router available.
+ */
+ private Router getNextRouter(Restlet current) {
+ Router result = getRouter();
+
+ if (result == null) {
+ if (current instanceof Router) {
+ result = (Router) current;
+ } else if (current instanceof Filter) {
+ result = getNextRouter(((Filter) current).getNext());
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Returns the preferred APISpark variant according to the client
+ * preferences specified in the request.
+ *
+ * @param request
+ * The request including client preferences.
+ * @return The preferred APISpark variant.
+ */
+ protected Variant getPreferredAPISparkVariant(Request request) {
+ return getConnegService().getPreferredVariant(getAPISparkVariants(),
+ request, getMetadataService());
+ }
+
+ /**
+ * Completes the data available about a given Filter instance.
+ *
+ * @param applicationInfo
+ * The parent application.
+ * @param filter
+ * The Filter instance to document.
+ * @param path
+ * The base path.
+ * @param request
+ * The current request.
+ * @param response
+ * The current response.
+ * @return The resource description.
+ */
+ private ResourceInfo getResourceInfo(ApplicationInfo applicationInfo,
+ Filter filter, String path, Request request, Response response) {
+ return getResourceInfo(applicationInfo, filter.getNext(), path,
+ request, response);
+ }
+
+ /**
+ * Completes the data available about a given Finder instance.
+ *
+ * @param applicationInfo
+ * The parent application.
+ * @param resourceInfo
+ * The ResourceInfo object to complete.
+ * @param finder
+ * The Finder instance to document.
+ * @param request
+ * The current request.
+ * @param response
+ * The current response.
+ */
+ private ResourceInfo getResourceInfo(ApplicationInfo applicationInfo,
+ Finder finder, String path, Request request, Response response) {
+ ResourceInfo result = null;
+ Object resource = null;
+
+ // Save the current application
+ Application.setCurrent(this);
+
+ if (finder instanceof Directory) {
+ resource = finder;
+ } else {
+ // The handler instance targeted by this finder.
+ ServerResource sr = finder.find(request, response);
+
+ if (sr != null) {
+ sr.init(getContext(), request, response);
+ sr.updateAllowedMethods();
+ resource = sr;
+ }
+ }
+
+ if (resource != null) {
+ result = new ResourceInfo();
+ ResourceInfo.describe(applicationInfo, result, resource, path);
+ }
+
+ return result;
+ }
+
+ /**
+ * Completes the data available about a given Restlet instance.
+ *
+ * @param applicationInfo
+ * The parent application.
+ * @param resourceInfo
+ * The ResourceInfo object to complete.
+ * @param restlet
+ * The Restlet instance to document.
+ * @param request
+ * The current request.
+ * @param response
+ * The current response.
+ */
+ private ResourceInfo getResourceInfo(ApplicationInfo applicationInfo,
+ Restlet restlet, String path, Request request, Response response) {
+ ResourceInfo result = null;
+
+ if (restlet instanceof ApisparkDescribable) {
+ result = ((ApisparkDescribable) restlet)
+ .getResourceInfo(applicationInfo);
+ result.setPath(path);
+ } else if (restlet instanceof Finder) {
+ result = getResourceInfo(applicationInfo, (Finder) restlet, path,
+ request, response);
+ } else if (restlet instanceof Router) {
+ result = new ResourceInfo();
+ result.setPath(path);
+ result.setChildResources(getResourceInfos(applicationInfo,
+ (Router) restlet, request, response));
+ } else if (restlet instanceof Filter) {
+ result = getResourceInfo(applicationInfo, (Filter) restlet, path,
+ request, response);
+ }
+
+ return result;
+ }
+
+ /**
+ * Returns the APISpark data about the given Route instance.
+ *
+ * @param applicationInfo
+ * The parent application.
+ * @param route
+ * The Route instance to document.
+ * @param basePath
+ * The base path.
+ * @param request
+ * The current request.
+ * @param response
+ * The current response.
+ * @return The APISpark data about the given Route instance.
+ */
+ private ResourceInfo getResourceInfo(ApplicationInfo applicationInfo,
+ Route route, String basePath, Request request, Response response) {
+ ResourceInfo result = null;
+
+ if (route instanceof TemplateRoute) {
+ TemplateRoute templateRoute = (TemplateRoute) route;
+ String path = templateRoute.getTemplate().getPattern();
+
+ // APISpark requires resource paths to be relative to parent path
+ if (path.startsWith("/") && basePath.endsWith("/")) {
+ path = path.substring(1);
+ }
+
+ result = getResourceInfo(applicationInfo, route.getNext(), path,
+ request, response);
+ }
+
+ return result;
+ }
+
+ /**
+ * Completes the list of ResourceInfo instances for the given Router
+ * instance.
+ *
+ * @param applicationInfo
+ * The parent application.
+ * @param router
+ * The router to document.
+ * @param request
+ * The current request.
+ * @param response
+ * The current response.
+ * @return The list of ResourceInfo instances to complete.
+ */
+ private List<ResourceInfo> getResourceInfos(
+ ApplicationInfo applicationInfo, Router router, Request request,
+ Response response) {
+ List<ResourceInfo> result = new ArrayList<ResourceInfo>();
+
+ for (Route route : router.getRoutes()) {
+ ResourceInfo resourceInfo = getResourceInfo(applicationInfo, route,
+ "/", request, response);
+
+ if (resourceInfo != null) {
+ result.add(resourceInfo);
+ }
+ }
+
+ if (router.getDefaultRoute() != null) {
+ ResourceInfo resourceInfo = getResourceInfo(applicationInfo,
+ router.getDefaultRoute(), "/", request, response);
+ if (resourceInfo != null) {
+ result.add(resourceInfo);
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Returns the router where the {@link ServerResource} classes created from
+ * the APISpark description document are attached.
+ *
+ * @return The root router.
+ */
+ public Router getRouter() {
+ return this.router;
+ }
+
+ /**
+ * Returns the virtual host matching the APISpark application's base
+ * reference. Creates a new one and attaches it to the component if
+ * necessary.
+ *
+ * @param component
+ * The parent component.
+ * @return The related virtual host.
+ */
+ private VirtualHost getVirtualHost(Component component) {
+ // Create the virtual host if necessary
+ final String hostDomain = this.baseRef.getHostDomain();
+ final String hostPort = Integer.toString(this.baseRef.getHostPort());
+ final String hostScheme = this.baseRef.getScheme();
+
+ VirtualHost host = null;
+ for (final VirtualHost vh : component.getHosts()) {
+ if (vh.getHostDomain().equals(hostDomain)
+ && vh.getHostPort().equals(hostPort)
+ && vh.getHostScheme().equals(hostScheme)) {
+ host = vh;
+ }
+ }
+
+ if (host == null) {
+ // A new virtual host needs to be created
+ host = new VirtualHost(component.getContext().createChildContext());
+ host.setHostDomain(hostDomain);
+ host.setHostPort(hostPort);
+ host.setHostScheme(hostScheme);
+ component.getHosts().add(host);
+ }
+
+ return host;
+ }
+
+ /**
+ * Handles the requests normally in all cases then handles the special case
+ * of the OPTIONS requests that exactly target the application. In this
+ * case, the application is automatically introspected and described as an
+ * APISpark representation based on the result of the
+ * {@link #getApplicationInfo(Request, Response)} method.<br>
+ * The automatic introspection happens only if the request hasn't already
+ * been successfully handled. That is to say, it lets users provide their
+ * own handling of OPTIONS requests.
+ *
+ * @param request
+ * The request to handle.
+ * @param response
+ * The response to update.
+ */
+ @Override
+ public void handle(Request request, Response response) {
+ // Preserve the resource reference.
+ Reference rr = request.getResourceRef().clone();
+
+ // Do the regular handling
+ super.handle(request, response);
+
+ // Restore the resource reference
+ request.setResourceRef(rr);
+
+ // Handle OPTIONS requests.
+ String rp = rr.getRemainingPart(false, false);
+
+ if (canDescribe(rp, request, response)) {
+ // Make sure that the base of the "resources" element ends with a
+ // "/".
+ if (!rr.getBaseRef().getIdentifier().endsWith("/")) {
+ rr.setBaseRef(rr.getBaseRef() + "/");
+ }
+
+ // Returns a APISpark representation of the application.
+ response.setEntity(apisparkRepresent(request, response));
+
+ if (response.isEntityAvailable()) {
+ response.setStatus(Status.SUCCESS_OK);
+ }
+ }
+ }
+
+ /**
+ * Indicates if the application should be automatically described via
+ * APISpark when an OPTIONS request handles a "*" target URI.
+ *
+ * @return True if the application should be automatically described via
+ * APISpark.
+ */
+ public boolean isAutoDescribing() {
+ return autoDescribing;
+ }
+
+ /**
+ * Indicates if the application should be automatically described via
+ * APISpark when an OPTIONS request handles a "*" target URI.
+ *
+ * @param autoDescribed
+ * True if the application should be automatically described via
+ * APISpark.
+ */
+ public void setAutoDescribing(boolean autoDescribed) {
+ this.autoDescribing = autoDescribed;
+ }
+
+ /**
+ * Sets the APISpark base reference.
+ *
+ * @param baseRef
+ * The APISpark base reference.
+ */
+ public void setBaseRef(Reference baseRef) {
+ this.baseRef = baseRef;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkComponent.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkComponent.java
new file mode 100644
index 0000000000..0dd6b985b1
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkComponent.java
@@ -0,0 +1,176 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import org.restlet.Component;
+import org.restlet.Request;
+import org.restlet.Response;
+import org.restlet.data.Method;
+import org.restlet.data.Reference;
+import org.restlet.representation.Representation;
+
+/**
+ * Component that can configure itself given a APISpark document. First, it
+ * creates the server connectors and the virtual hosts if needed, trying to
+ * reuse existing ones if available. Then it creates a
+ * {@link ApisparkApplication} using this
+ * {@link ApisparkApplication#APISparkApplication(Representation)} constructor.<br>
+ * <br>
+ * Concurrency note: instances of this class or its subclasses can be invoked by
+ * several threads at the same time and therefore must be thread-safe. You
+ * should be especially careful when storing state in member variables.
+ *
+ * @author Jerome Louvel
+ */
+public class ApisparkComponent extends Component {
+
+ /**
+ * Main method capable of configuring and starting a whole Restlet Component
+ * based on a list of local APISpark documents URIs, for example
+ * "file:///C:/YahooSearch.apispark".<br>
+ * <br>
+ * The necessary client connectors are automatically created.
+ *
+ * @param args
+ * List of local APISpark document URIs.
+ * @throws Exception
+ */
+ public static void main(String[] args) throws Exception {
+ // Create a new APISpark-aware component
+ final ApisparkComponent component = new ApisparkComponent();
+
+ // For each APISpark document URI attach a matching Application
+ for (final String arg : args) {
+ component.attach(arg);
+ }
+
+ // Start the component
+ component.start();
+ }
+
+ /**
+ * Default constructor.
+ */
+ public ApisparkComponent() {
+ }
+
+ /**
+ * Constructor loading an APISpark description document at a given URI.<br>
+ * <br>
+ * The necessary client connectors are automatically created.
+ *
+ * @param apisparkRef
+ * The URI reference to the APISpark description document.
+ */
+ public ApisparkComponent(Reference apisparkRef) {
+ attach(apisparkRef);
+ }
+
+ /**
+ * Constructor based on a given APISpark description document.
+ *
+ * @param apispark
+ * The APISpark description document.
+ */
+ public ApisparkComponent(Representation apispark) {
+ attach(apispark);
+ }
+
+ /**
+ * Constructor loading an APISpark description document at a given URI.<br>
+ * <br>
+ * The necessary client connectors are automatically created.
+ *
+ * @param apisparkUri
+ * The URI to the APISpark description document.
+ */
+ public ApisparkComponent(String apisparkUri) {
+ attach(apisparkUri);
+ }
+
+ /**
+ * Attaches an application created from an APISpark description document
+ * available at a given URI reference.
+ *
+ * @param apisparkRef
+ * The URI reference to the APISpark description document.
+ * @return The created APISpark application.
+ */
+ public ApisparkApplication attach(Reference apisparkRef) {
+ ApisparkApplication result = null;
+
+ // Adds some common client connectors to load the APISpark documents
+ if (!getClients().contains(apisparkRef.getSchemeProtocol())) {
+ getClients().add(apisparkRef.getSchemeProtocol());
+ }
+
+ // Get the APISpark document
+ final Response response = getContext().getClientDispatcher().handle(
+ new Request(Method.GET, apisparkRef));
+
+ if (response.getStatus().isSuccess() && response.isEntityAvailable()) {
+ result = attach(response.getEntity());
+ }
+
+ return result;
+ }
+
+ /**
+ * Attaches an application created from an APISpark description document to
+ * the component.
+ *
+ * @param apispark
+ * The APISpark description document.
+ * @return The created APISpark application.
+ */
+ public ApisparkApplication attach(Representation apispark) {
+ final ApisparkApplication result = new ApisparkApplication(getContext()
+ .createChildContext(), apispark);
+ result.attachToComponent(this);
+ return result;
+ }
+
+ /**
+ * Attaches an application created from an APISpark description document
+ * available at a given URI.
+ *
+ * @param apisparkUri
+ * The URI to the APISpark description document.
+ * @return The created APISpark application.
+ */
+ public ApisparkApplication attach(String apisparkUri) {
+ return attach(new Reference(apisparkUri));
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkConverter.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkConverter.java
new file mode 100644
index 0000000000..76479f8c63
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkConverter.java
@@ -0,0 +1,141 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.restlet.data.MediaType;
+import org.restlet.data.Preference;
+import org.restlet.engine.converter.ConverterHelper;
+import org.restlet.engine.resource.VariantInfo;
+import org.restlet.representation.Representation;
+import org.restlet.representation.Variant;
+import org.restlet.resource.Resource;
+
+/**
+ * A converter helper to convert between {@link ApplicationInfo} objects and
+ * {@link ApisparkRepresentation} ones.
+ *
+ * @author Thierry Boileau
+ */
+public class ApisparkConverter extends ConverterHelper {
+
+ private static final VariantInfo VARIANT_APPLICATION_SWAGGER = new VariantInfo(
+ MediaType.APPLICATION_JSON);
+
+ @Override
+ public List<Class<?>> getObjectClasses(Variant source) {
+ List<Class<?>> result = null;
+
+ if (VARIANT_APPLICATION_SWAGGER.includes(source)) {
+ result = addObjectClass(result, ApplicationInfo.class);
+ }
+
+ return result;
+ }
+
+ @Override
+ public List<VariantInfo> getVariants(Class<?> source) {
+ List<VariantInfo> result = null;
+
+ if (ApplicationInfo.class.isAssignableFrom(source)) {
+ result = addVariant(result, VARIANT_APPLICATION_SWAGGER);
+ }
+
+ return result;
+ }
+
+ @Override
+ public float score(Object source, Variant target, Resource resource) {
+ if (source instanceof ApplicationInfo) {
+ return 1.0f;
+ }
+
+ return -1.0f;
+ }
+
+ @Override
+ public <T> float score(Representation source, Class<T> target,
+ Resource resource) {
+ float result = -1.0F;
+
+ if ((source != null)
+ && (ApplicationInfo.class.isAssignableFrom(target))) {
+ result = 1.0F;
+ }
+
+ return result;
+ }
+
+ @Override
+ public <T> T toObject(Representation source, Class<T> target,
+ Resource resource) throws IOException {
+ ApisparkRepresentation apisparkSource = null;
+ if (source instanceof ApisparkRepresentation) {
+ apisparkSource = (ApisparkRepresentation) source;
+ } else {
+ // TODO
+ // apisparkSource = new APISparkRepresentation(source);
+ }
+
+ T result = null;
+ if (target != null) {
+ if (ApplicationInfo.class.isAssignableFrom(target)) {
+ result = target.cast(apisparkSource.getApplication());
+ }
+ }
+
+ return result;
+ }
+
+ @Override
+ public Representation toRepresentation(Object source, Variant target,
+ Resource resource) throws IOException {
+ if (source instanceof ApplicationInfo) {
+ return new ApisparkRepresentation((ApplicationInfo) source);
+ }
+
+ return null;
+ }
+
+ @Override
+ public <T> void updatePreferences(List<Preference<MediaType>> preferences,
+ Class<T> entity) {
+ if (ApplicationInfo.class.isAssignableFrom(entity)) {
+ updatePreferences(preferences, MediaType.APPLICATION_JSON, 1.0F);
+ updatePreferences(preferences, MediaType.APPLICATION_XML, 0.9F);
+ }
+ }
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkDescribable.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkDescribable.java
new file mode 100644
index 0000000000..6de8f3a68d
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkDescribable.java
@@ -0,0 +1,60 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import org.restlet.resource.Directory;
+import org.restlet.resource.ServerResource;
+
+/**
+ * Interface that any Restlet can implement in order to provide its own
+ * APISpark documentation. This is especially useful for subclasses of
+ * {@link Directory} or other resource finders when the APISpark introspection
+ * can't reach {@link ServerResource} or better {@link ApisparkServerResource}
+ * instances.
+ *
+ * @author Thierry Boileau
+ */
+public interface ApisparkDescribable {
+
+ /**
+ * Returns a full documented {@link ResourceInfo} instance.
+ *
+ * @param applicationInfo
+ * The parent APISpark application descriptor.
+ *
+ * @return A full documented {@link ResourceInfo} instance.
+ */
+ public ResourceInfo getResourceInfo(ApplicationInfo applicationInfo);
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkRepresentation.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkRepresentation.java
new file mode 100644
index 0000000000..2a5750efde
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkRepresentation.java
@@ -0,0 +1,311 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.restlet.Server;
+import org.restlet.data.Protocol;
+import org.restlet.data.Status;
+import org.restlet.engine.Engine;
+import org.restlet.engine.connector.ConnectorHelper;
+import org.restlet.ext.apispark.Body;
+import org.restlet.ext.apispark.Contract;
+import org.restlet.ext.apispark.Documentation;
+import org.restlet.ext.apispark.Method;
+import org.restlet.ext.apispark.Operation;
+import org.restlet.ext.apispark.Parameter;
+import org.restlet.ext.apispark.PathVariable;
+import org.restlet.ext.apispark.Property;
+import org.restlet.ext.apispark.Representation;
+import org.restlet.ext.apispark.Resource;
+import org.restlet.ext.apispark.Response;
+import org.restlet.ext.apispark.Variant;
+import org.restlet.ext.jackson.JacksonRepresentation;
+
+/**
+ * Root of an APISpark description document.<br>
+ *
+ * @author Jerome Louvel
+ */
+public class ApisparkRepresentation extends
+ JacksonRepresentation<Documentation> {
+
+ private static Documentation toDocumentation(ApplicationInfo application) {
+ Documentation result = null;
+ if (application != null) {
+ result = new Documentation();
+ result.setVersion(application.getVersion());
+
+ Contract contract = new Contract();
+ result.setContract(contract);
+ contract.setDescription(toString(application.getDocumentations()));
+ contract.setName(application.getName());
+
+ // List of representations.
+ contract.setRepresentations(new ArrayList<Representation>());
+ for (RepresentationInfo ri : application.getRepresentations()) {
+ Representation rep = new Representation();
+
+ // TODO analyze
+ // The models differ : one representation / one variant for
+ // Restlet
+ // one representation / several variants for APIspark
+ rep.setDescription(toString(ri.getDocumentations()));
+ rep.setName(ri.getIdentifier());
+ Variant variant = new Variant();
+ variant.setDataType(ri.getMediaType().getName());
+ rep.setVariants(new ArrayList<Variant>());
+ rep.getVariants().add(variant);
+
+ rep.setProperties(new ArrayList<Property>());
+ for (int i = 0; i < ri.getParameters().size(); i++) {
+ ParameterInfo pi = ri.getParameters().get(i);
+
+ Property property = new Property();
+ property.setName(pi.getName());
+ property.setDescription(toString(pi.getDocumentations()));
+ property.setType(pi.getType());
+
+ rep.getProperties().add(property);
+ }
+
+ contract.getRepresentations().add(rep);
+ }
+
+ // List of resources.
+ // TODO Resource path/basePath?
+ contract.setResources(new ArrayList<Resource>());
+ for (ResourceInfo ri : application.getResources().getResources()) {
+
+ Resource resource = new Resource();
+ resource.setDescription(toString(ri.getDocumentations()));
+ resource.setName(ri.getIdentifier());
+ resource.setResourcePath(ri.getPath());
+
+ resource.setOperations(new ArrayList<Operation>());
+ int i = 0;
+ for (MethodInfo mi : ri.getMethods()) {
+
+ Operation operation = new Operation();
+ operation.setDescription(toString(mi.getDocumentations()));
+ operation.setName(mi.getName().getName());
+ // TODO complete Method class with mi.getName()
+ operation.setMethod(new Method());
+ operation.getMethod().setDescription(mi.getName().getDescription());
+ operation.getMethod().setName(mi.getName().getName());
+
+ // Complete parameters
+ operation.setHeaders(new ArrayList<Parameter>());
+ operation.setPathVariables(new ArrayList<PathVariable>());
+ operation.setQueryParameters(new ArrayList<Parameter>());
+ if (mi.getRequest() != null
+ && mi.getRequest().getParameters() != null) {
+ for (ParameterInfo pi : mi.getRequest().getParameters()) {
+ if (ParameterStyle.HEADER.equals(pi.getStyle())) {
+ Parameter parameter = new Parameter();
+ parameter.setAllowMultiple(pi.isRepeating());
+ parameter.setDefaultValue(pi.getDefaultValue());
+ parameter.setDescription(toString(pi
+ .getDocumentations()));
+ parameter.setName(pi.getName());
+ parameter
+ .setPossibleValues(new ArrayList<String>());
+ parameter.setRequired(pi.isRequired());
+
+ operation.getHeaders().add(parameter);
+ } else if (ParameterStyle.TEMPLATE.equals(pi
+ .getStyle())) {
+ PathVariable pathVariable = new PathVariable();
+
+ pathVariable.setDescription(toString(pi
+ .getDocumentations()));
+ pathVariable.setName(pi.getName());
+
+ operation.getPathVariables().add(pathVariable);
+ } else if (ParameterStyle.QUERY.equals(pi
+ .getStyle())) {
+ Parameter parameter = new Parameter();
+ parameter.setAllowMultiple(pi.isRepeating());
+ parameter.setDefaultValue(pi.getDefaultValue());
+ parameter.setDescription(toString(pi
+ .getDocumentations()));
+ parameter.setName(pi.getName());
+ parameter
+ .setPossibleValues(new ArrayList<String>());
+ parameter.setRequired(pi.isRequired());
+
+ operation.getHeaders().add(parameter);
+ }
+ }
+ }
+
+ if (mi.getRequest() != null
+ && mi.getRequest().getRepresentations() != null
+ && !mi.getRequest().getRepresentations().isEmpty()) {
+ Body body = new Body();
+ // TODO analyze
+ // The models differ : one representation / one variant
+ // for Restlet one representation / several variants for
+ // APIspark
+ body.setRepresentation(mi.getRequest()
+ .getRepresentations().get(0).getIdentifier());
+
+ operation.setInRepresentation(body);
+ }
+
+ if (mi.getResponses() != null
+ && !mi.getResponses().isEmpty()) {
+ operation.setResponses(new ArrayList<Response>());
+
+ Body body = new Body();
+ // TODO analyze
+ // The models differ : one representation / one variant
+ // for Restlet one representation / several variants for
+ // APIspark
+
+ operation.setOutRepresentation(body);
+
+ for (ResponseInfo rio : mi.getResponses()) {
+ if (!rio.getStatuses().isEmpty()) {
+ Status status = rio.getStatuses().get(0);
+ // TODO analyze
+ // The models differ : one representation / one variant
+ // for Restlet one representation / several variants for
+ // APIspark
+
+ Response response = new Response();
+ response.setBody(body);
+ response.setCode(status.getCode());
+ response.setDescription(toString(rio.getDocumentations()));
+ response.setMessage(status.getDescription());
+ //response.setName();
+
+ operation.getResponses().add(response);
+ }
+ }
+ }
+
+ resource.getOperations().add(operation);
+ }
+
+ contract.getResources().add(resource);
+ }
+
+ java.util.List<String> protocols = new ArrayList<String>();
+ for (ConnectorHelper<Server> helper : Engine.getInstance()
+ .getRegisteredServers()) {
+ for (Protocol protocol : helper.getProtocols()) {
+ if (!protocols.contains(protocol.getName())) {
+ protocols.add(protocol.getName());
+ }
+ }
+ }
+
+ }
+ return result;
+ }
+
+ private static String toString(List<DocumentationInfo> di) {
+ StringBuilder d = new StringBuilder();
+ for (DocumentationInfo doc : di) {
+ d.append(doc.getTextContent());
+ }
+ return d.toString();
+ }
+
+ /** The root element of the APISpark document. */
+ private ApplicationInfo application;
+
+ /**
+ * Constructor.
+ *
+ * @param application
+ * The root element of the APISpark document.
+ */
+ public ApisparkRepresentation(ApplicationInfo application) {
+ super(toDocumentation(application));
+
+ this.application = application;
+ }
+
+ /**
+ * Constructor.
+ *
+ * @param documentation
+ * The description of the APISpark document.
+ */
+ public ApisparkRepresentation(Documentation documentation) {
+ super(documentation);
+ // Transform contract to ApplicationInfo
+ }
+
+ // /**
+ // * Constructor.
+ // *
+ // * @param representation
+ // * The XML APISpark document.
+ // * @throws IOException
+ // */
+ // public APISparkRepresentation(Representation representation)
+ // throws IOException {
+ // super(representation);
+ // setMediaType(MediaType.APPLICATION_JSON);
+ //
+ // // Parse the given document using SAX to produce an ApplicationInfo
+ // // instance.
+ // // parse(new ContentReader(this));
+ // }
+
+ /**
+ * Returns the root element of the APISpark document.
+ *
+ * @return The root element of the APISpark document.
+ */
+ public ApplicationInfo getApplication() {
+ return this.application;
+ }
+
+ /**
+ * Sets the root element of the APISpark document.
+ *
+ * @param application
+ * The root element of the APISpark document.
+ */
+ public void setApplication(ApplicationInfo application) {
+ this.application = application;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkServerResource.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkServerResource.java
new file mode 100644
index 0000000000..a4ca8787d2
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkServerResource.java
@@ -0,0 +1,591 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.restlet.data.Header;
+import org.restlet.data.MediaType;
+import org.restlet.data.Method;
+import org.restlet.data.Parameter;
+import org.restlet.data.Reference;
+import org.restlet.engine.header.HeaderConstants;
+import org.restlet.representation.Representation;
+import org.restlet.representation.Variant;
+import org.restlet.resource.ResourceException;
+import org.restlet.resource.ServerResource;
+import org.restlet.util.NamedValue;
+import org.restlet.util.Series;
+
+/**
+ * Resource that is able to automatically describe itself with APISpark. This
+ * description can be customized by overriding the {@link #describe()} and
+ * {@link #describeMethod(Method, MethodInfo)} methods.<br>
+ * <br>
+ * When used to describe a class of resources in the context of a parent
+ * application, a special instance will be created using the default constructor
+ * (with no request, response associated). In this case, the resource should do
+ * its best to return the generic information when the APISpark description
+ * methods are invoked, like {@link #describe()} and delegate methods.
+ *
+ * @author Jerome Louvel
+ */
+public class ApisparkServerResource extends ServerResource {
+
+ /**
+ * Indicates if the resource should be automatically described via APISpark
+ * when an OPTIONS request is handled.
+ */
+ private volatile boolean autoDescribing;
+
+ /**
+ * The description of this documented resource. Is seen as the text content
+ * of the "doc" tag of the "resource" element in an APISpark document.
+ */
+ private volatile String description;
+
+ /**
+ * The name of this documented resource. Is seen as the title of the "doc"
+ * tag of the "resource" element in a APISpark document.
+ */
+ private volatile String name;
+
+ /**
+ * Constructor.
+ */
+ public ApisparkServerResource() {
+ this.autoDescribing = true;
+ }
+
+ /**
+ * Indicates if the given method exposes its APISpark description. By
+ * default, HEAD and OPTIONS are not exposed. This method is called by
+ * {@link #describe(String, ResourceInfo)}.
+ *
+ * @param method
+ * The method
+ * @return True if the method exposes its description, false otherwise.
+ */
+ public boolean canDescribe(Method method) {
+ return !(Method.HEAD.equals(method) || Method.OPTIONS.equals(method));
+ }
+
+ /**
+ * Creates a new APISpark representation for a given {@link ApplicationInfo}
+ * instance describing an application.
+ *
+ * @param applicationInfo
+ * The application description.
+ * @return The created {@link ApisparkRepresentation}.
+ */
+ protected Representation createAPISparkRepresentation(
+ ApplicationInfo applicationInfo) {
+ return new ApisparkRepresentation(applicationInfo);
+ }
+
+ /**
+ * Describes the resource as a standalone APISpark document.
+ *
+ * @return The APISpark description.
+ */
+ protected Representation describe() {
+ return describe(getPreferredAPISparkVariant());
+ }
+
+ /**
+ * Updates the description of the parent application. This is typically used
+ * to add documentation on global representations used by several methods or
+ * resources. Does nothing by default.
+ *
+ * @param applicationInfo
+ * The parent application.
+ */
+ protected void describe(ApplicationInfo applicationInfo) {
+ }
+
+ /**
+ * Describes a representation class and variant couple as APISpark
+ * information. The variant contains the target media type that can be
+ * converted to by one of the available Restlet converters.
+ *
+ * @param methodInfo
+ * The parent method description.
+ * @param representationClass
+ * The representation bean class.
+ * @param variant
+ * The target variant.
+ * @return The APISpark representation information.
+ */
+ protected RepresentationInfo describe(MethodInfo methodInfo,
+ Class<?> representationClass, Variant variant) {
+ return new RepresentationInfo(variant);
+ }
+
+ /**
+ * Describes a representation class and variant couple as APISpark
+ * information for the given method and request. The variant contains the
+ * target media type that can be converted to by one of the available
+ * Restlet converters.<br>
+ * <br>
+ * By default, it calls {@link #describe(MethodInfo, Class, Variant)}.
+ *
+ * @param methodInfo
+ * The parent method description.
+ * @param requestInfo
+ * The parent request description.
+ * @param representationClass
+ * The representation bean class.
+ * @param variant
+ * The target variant.
+ * @return The APISpark representation information.
+ */
+ protected RepresentationInfo describe(MethodInfo methodInfo,
+ RequestInfo requestInfo, Class<?> representationClass,
+ Variant variant) {
+ return describe(methodInfo, representationClass, variant);
+ }
+
+ /**
+ * Describes a representation class and variant couple as APISpark
+ * information for the given method and response. The variant contains the
+ * target media type that can be converted to by one of the available
+ * Restlet converters.<br>
+ * <br>
+ * By default, it calls {@link #describe(MethodInfo, Class, Variant)}.
+ *
+ * @param methodInfo
+ * The parent method description.
+ * @param responseInfo
+ * The parent response description.
+ * @param representationClass
+ * The representation bean class.
+ * @param variant
+ * The target variant.
+ * @return The APISpark representation information.
+ */
+ protected RepresentationInfo describe(MethodInfo methodInfo,
+ ResponseInfo responseInfo, Class<?> representationClass,
+ Variant variant) {
+ return describe(methodInfo, representationClass, variant);
+ }
+
+ /**
+ * Returns an APISpark description of the current resource, leveraging the
+ * {@link #getResourcePath()} method.
+ *
+ * @param info
+ * APISpark description of the current resource to update.
+ */
+ public void describe(ResourceInfo info) {
+ describe(getResourcePath(), info);
+ }
+
+ /**
+ * Returns an APISpark description of the current resource.
+ *
+ * @param path
+ * Path of the current resource.
+ * @param info
+ * APISpark description of the current resource to update.
+ */
+ public void describe(String path, ResourceInfo info) {
+ ResourceInfo.describe(null, info, this, path);
+ }
+
+ /**
+ * Describes the resource as an APISpark document for the given variant.
+ *
+ * @param variant
+ * The APISpark variant.
+ * @return The APISpark description.
+ */
+ protected Representation describe(Variant variant) {
+ Representation result = null;
+
+ if (variant != null) {
+ ResourceInfo resource = new ResourceInfo();
+ describe(resource);
+ ApplicationInfo application = resource.createApplication();
+ describe(application);
+
+ if (MediaType.APPLICATION_JSON.equals(variant.getMediaType())) {
+ result = createAPISparkRepresentation(application);
+ } else if (MediaType.APPLICATION_XML.equals(variant.getMediaType())) {
+ result = createAPISparkRepresentation(application);
+ } else if (MediaType.TEXT_XML.equals(variant.getMediaType())) {
+ result = createAPISparkRepresentation(application);
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Describes the DELETE method.
+ *
+ * @param info
+ * The method description to update.
+ */
+ protected void describeDelete(MethodInfo info) {
+ MethodInfo.describeAnnotations(info, this);
+ }
+
+ /**
+ * Describes the GET method.<br>
+ * By default, it describes the response with the available variants based
+ * on the {@link #getVariants()} method. Thus in the majority of cases, the
+ * method of the super class must be called when overridden.
+ *
+ * @param info
+ * The method description to update.
+ */
+ protected void describeGet(MethodInfo info) {
+ MethodInfo.describeAnnotations(info, this);
+ }
+
+ /**
+ * Returns an APISpark description of the current method.
+ *
+ * @return An APISpark description of the current method.
+ */
+ protected MethodInfo describeMethod() {
+ MethodInfo result = new MethodInfo();
+ describeMethod(getMethod(), result);
+ return result;
+ }
+
+ /**
+ * Returns an APISpark description of the given method.
+ *
+ * @param method
+ * The method to describe.
+ * @param info
+ * The method description to update.
+ */
+ protected void describeMethod(Method method, MethodInfo info) {
+ info.setName(method);
+
+ if (Method.GET.equals(method)) {
+ describeGet(info);
+ } else if (Method.POST.equals(method)) {
+ describePost(info);
+ } else if (Method.PUT.equals(method)) {
+ describePut(info);
+ } else if (Method.DELETE.equals(method)) {
+ describeDelete(info);
+ } else if (Method.OPTIONS.equals(method)) {
+ describeOptions(info);
+ } else if (Method.PATCH.equals(method)) {
+ describePatch(info);
+ }
+ }
+
+ /**
+ * Describes the OPTIONS method.<br>
+ * By default it describes the response with the available variants based on
+ * the {@link #getAPISparkVariants()} method.
+ *
+ * @param info
+ * The method description to update.
+ */
+ protected void describeOptions(MethodInfo info) {
+ // Describe each variant
+ for (Variant variant : getAPISparkVariants()) {
+ RepresentationInfo result = new RepresentationInfo(variant);
+ info.getResponse().getRepresentations().add(result);
+ }
+ }
+
+ /**
+ * Returns the description of the parameters of this resource. Returns null
+ * by default.
+ *
+ * @return The description of the parameters.
+ */
+ protected List<ParameterInfo> describeParameters() {
+ return null;
+ }
+
+ /**
+ * Describes the Patch method.
+ *
+ * @param info
+ * The method description to update.
+ */
+ protected void describePatch(MethodInfo info) {
+ MethodInfo.describeAnnotations(info, this);
+ }
+
+ /**
+ * Describes the POST method.
+ *
+ * @param info
+ * The method description to update.
+ */
+ protected void describePost(MethodInfo info) {
+ MethodInfo.describeAnnotations(info, this);
+ }
+
+ /**
+ * Describes the PUT method.
+ *
+ * @param info
+ * The method description to update.
+ */
+ protected void describePut(MethodInfo info) {
+ MethodInfo.describeAnnotations(info, this);
+ }
+
+ @Override
+ protected void doInit() throws ResourceException {
+ super.doInit();
+ this.autoDescribing = true;
+ }
+
+ /**
+ * Returns the available APISpark variants.
+ *
+ * @return The available APISpark variants.
+ */
+ protected List<Variant> getAPISparkVariants() {
+ List<Variant> result = new ArrayList<Variant>();
+ result.add(new Variant(MediaType.APPLICATION_JSON));
+ result.add(new Variant(MediaType.TEXT_HTML));
+ return result;
+ }
+
+ /**
+ * Returns the description of this documented resource. Is seen as the text
+ * content of the "doc" tag of the "resource" element in an APISpark
+ * document.
+ *
+ * @return The description of this documented resource.
+ */
+ public String getDescription() {
+ return description;
+ }
+
+ /**
+ * Returns the set of headers as a collection of {@link Parameter} objects.
+ *
+ * @return The set of headers as a collection of {@link Parameter} objects.
+ */
+ @SuppressWarnings("unchecked")
+ private Series<Header> getHeaders() {
+ return (Series<Header>) getRequestAttributes().get(
+ HeaderConstants.ATTRIBUTE_HEADERS);
+ }
+
+ /**
+ * Returns the name of this documented resource. Is seen as the title of the
+ * "doc" tag of the "resource" element in a APISpark document.
+ *
+ * @return The name of this documented resource.
+ */
+ public String getName() {
+ return name;
+ }
+
+ /**
+ * Returns the first parameter found in the current context (entity, query,
+ * headers, etc) with the given name.
+ *
+ * @param name
+ * The parameter name.
+ * @return The first parameter found with the given name.
+ */
+ protected NamedValue<String> getParameter(String name) {
+ NamedValue<String> result = null;
+ Series<? extends NamedValue<String>> set = getParameters(name);
+
+ if (set != null) {
+ result = set.getFirst(name);
+ }
+
+ return result;
+ }
+
+ /**
+ * Returns a collection of parameters objects contained in the current
+ * context (entity, query, headers, etc) given a ParameterInfo instance.
+ *
+ * @param parameterInfo
+ * The ParameterInfo instance.
+ * @return A collection of parameters objects
+ */
+ private Series<? extends NamedValue<String>> getParameters(
+ ParameterInfo parameterInfo) {
+ Series<? extends NamedValue<String>> result = null;
+
+ if (parameterInfo.getFixed() != null) {
+ result = new Series<Parameter>(Parameter.class);
+ result.add(parameterInfo.getName(), parameterInfo.getFixed());
+ } else if (ParameterStyle.HEADER.equals(parameterInfo.getStyle())) {
+ result = getHeaders().subList(parameterInfo.getName());
+ } else if (ParameterStyle.TEMPLATE.equals(parameterInfo.getStyle())) {
+ Object parameter = getRequest().getAttributes().get(
+ parameterInfo.getName());
+
+ if (parameter != null) {
+ result = new Series<Parameter>(Parameter.class);
+ result.add(parameterInfo.getName(),
+ Reference.decode((String) parameter));
+ }
+ } else if (ParameterStyle.MATRIX.equals(parameterInfo.getStyle())) {
+ result = getMatrix().subList(parameterInfo.getName());
+ } else if (ParameterStyle.QUERY.equals(parameterInfo.getStyle())) {
+ result = getQuery().subList(parameterInfo.getName());
+ } else if (ParameterStyle.PLAIN.equals(parameterInfo.getStyle())) {
+ // TODO not yet implemented.
+ }
+
+ if (result == null && parameterInfo.getDefaultValue() != null) {
+ result = new Series<Parameter>(Parameter.class);
+ result.add(parameterInfo.getName(), parameterInfo.getDefaultValue());
+ }
+
+ return result;
+ }
+
+ /**
+ * Returns a collection of parameters found in the current context (entity,
+ * query, headers, etc) given a parameter name. It returns null if the
+ * parameter name is unknown.
+ *
+ * @param name
+ * The name of the parameter.
+ * @return A collection of parameters.
+ */
+ protected Series<? extends NamedValue<String>> getParameters(String name) {
+ Series<? extends NamedValue<String>> result = null;
+
+ if (describeParameters() != null) {
+ for (ParameterInfo parameter : describeParameters()) {
+ if (name.equals(parameter.getName())) {
+ result = getParameters(parameter);
+ }
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Returns the preferred APISpark variant according to the client
+ * preferences specified in the request.
+ *
+ * @return The preferred APISpark variant.
+ */
+ protected Variant getPreferredAPISparkVariant() {
+ return getConnegService().getPreferredVariant(getAPISparkVariants(),
+ getRequest(), getMetadataService());
+ }
+
+ /**
+ * Returns the resource's relative path.
+ *
+ * @return The resource's relative path.
+ */
+ protected String getResourcePath() {
+ Reference ref = new Reference(getRequest().getRootRef(), getRequest()
+ .getResourceRef());
+ return ref.getRemainingPart();
+ }
+
+ /**
+ * Returns the application resources base URI.
+ *
+ * @return The application resources base URI.
+ */
+ protected Reference getResourcesBase() {
+ return getRequest().getRootRef();
+ }
+
+ /**
+ * Indicates if the resource should be automatically described via APISpark
+ * when an OPTIONS request is handled.
+ *
+ * @return True if the resource should be automatically described via
+ * APISpark.
+ */
+ public boolean isAutoDescribing() {
+ return this.autoDescribing;
+ }
+
+ @Override
+ public Representation options() {
+ if (isAutoDescribing()) {
+ return describe();
+ }
+
+ return null;
+ }
+
+ /**
+ * Indicates if the resource should be automatically described via APISpark
+ * when an OPTIONS request is handled.
+ *
+ * @param autoDescribed
+ * True if the resource should be automatically described via
+ * APISpark.
+ */
+ public void setAutoDescribing(boolean autoDescribed) {
+ this.autoDescribing = autoDescribed;
+ }
+
+ /**
+ * Sets the description of this documented resource. Is seen as the text
+ * content of the "doc" tag of the "resource" element in an APISpark
+ * document.
+ *
+ * @param description
+ * The description of this documented resource.
+ */
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ /**
+ * Sets the name of this documented resource. Is seen as the title of the
+ * "doc" tag of the "resource" element in a APISpark document.
+ *
+ * @param name
+ * The name of this documented resource.
+ */
+ public void setName(String name) {
+ this.name = name;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkWrapper.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkWrapper.java
new file mode 100644
index 0000000000..fd1e6d5e32
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApisparkWrapper.java
@@ -0,0 +1,82 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import org.restlet.Restlet;
+import org.restlet.resource.Directory;
+import org.restlet.util.WrapperRestlet;
+
+/**
+ * APISpark wrapper for {@link Restlet} instances. Useful if you need to provide
+ * the APISpark documentation for instances of classes such as
+ * {@link Directory}.
+ *
+ * @author Thierry Boileau
+ */
+public abstract class ApisparkWrapper extends WrapperRestlet implements
+ ApisparkDescribable {
+
+ /** The description of the wrapped Restlet. */
+ private ResourceInfo resourceInfo;
+
+ /**
+ * Constructor.
+ *
+ * @param wrappedRestlet
+ * The Restlet to wrap.
+ */
+ public ApisparkWrapper(Restlet wrappedRestlet) {
+ super(wrappedRestlet);
+ }
+
+ /**
+ * Returns the description of the wrapped Restlet.
+ *
+ * @return The ResourceInfo object of the wrapped Restlet.
+ */
+ public ResourceInfo getResourceInfo() {
+ return this.resourceInfo;
+ }
+
+ /**
+ * Sets the description of the wrapped Restlet.
+ *
+ * @param resourceInfo
+ * The ResourceInfo object of the wrapped Restlet.
+ */
+ public void setResourceInfo(ResourceInfo resourceInfo) {
+ this.resourceInfo = resourceInfo;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApplicationInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApplicationInfo.java
new file mode 100644
index 0000000000..878b63e0a9
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ApplicationInfo.java
@@ -0,0 +1,247 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Root of an APISpark description document.
+ *
+ * @author Jerome Louvel
+ */
+public class ApplicationInfo extends DocumentedInfo {
+
+ /** List of methods. */
+ private List<MethodInfo> methods;
+
+ /** Name. */
+ private String name;
+
+ /** List of representations. */
+ private List<RepresentationInfo> representations;
+
+ /** Resources provided by the application. */
+ private ResourcesInfo resources;
+
+ /**
+ * Describes a set of methods that define the behavior of a type of
+ * resource.
+ */
+ private List<ResourceTypeInfo> resourceTypes;
+
+ /** The version of the Application. */
+ private String version;
+
+ /**
+ * Constructor.
+ */
+ public ApplicationInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public ApplicationInfo(DocumentationInfo documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public ApplicationInfo(List<DocumentationInfo> documentations) {
+ super(documentations);
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public ApplicationInfo(String documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Returns the list of method elements.
+ *
+ * @return The list of method elements.
+ */
+ public List<MethodInfo> getMethods() {
+ // Lazy initialization with double-check.
+ List<MethodInfo> m = this.methods;
+ if (m == null) {
+ synchronized (this) {
+ m = this.methods;
+ if (m == null) {
+ this.methods = m = new ArrayList<MethodInfo>();
+ }
+ }
+ }
+ return m;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ /**
+ * Returns the list of representation elements.
+ *
+ * @return The list of representation elements.
+ */
+ public List<RepresentationInfo> getRepresentations() {
+ // Lazy initialization with double-check.
+ List<RepresentationInfo> r = this.representations;
+ if (r == null) {
+ synchronized (this) {
+ r = this.representations;
+ if (r == null) {
+ this.representations = r = new ArrayList<RepresentationInfo>();
+ }
+ }
+ }
+ return r;
+ }
+
+ /**
+ * Returns the resources root element.
+ *
+ * @return The resources root element.
+ */
+ public ResourcesInfo getResources() {
+ // Lazy initialization with double-check.
+ ResourcesInfo r = this.resources;
+ if (r == null) {
+ synchronized (this) {
+ r = this.resources;
+ if (r == null) {
+ this.resources = r = new ResourcesInfo();
+ }
+ }
+ }
+ return r;
+ }
+
+ /**
+ * Returns the list of resource type elements.
+ *
+ * @return The list of resource type elements.
+ */
+ public List<ResourceTypeInfo> getResourceTypes() {
+ // Lazy initialization with double-check.
+ List<ResourceTypeInfo> rt = this.resourceTypes;
+ if (rt == null) {
+ synchronized (this) {
+ rt = this.resourceTypes;
+ if (rt == null) {
+ this.resourceTypes = rt = new ArrayList<ResourceTypeInfo>();
+ }
+ }
+ }
+ return rt;
+ }
+
+ /**
+ * Returns the version of the Application.
+ *
+ * @return The version of the Application.
+ */
+ public String getVersion() {
+ return version;
+ }
+
+ /**
+ * Sets the list of documentation elements.
+ *
+ * @param methods
+ * The list of method elements.
+ */
+ public void setMethods(List<MethodInfo> methods) {
+ this.methods = methods;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ /**
+ * Sets the list of representation elements.
+ *
+ * @param representations
+ * The list of representation elements.
+ */
+ public void setRepresentations(List<RepresentationInfo> representations) {
+ this.representations = representations;
+ }
+
+ /**
+ * Sets the list of resource elements.
+ *
+ * @param resources
+ * The list of resource elements.
+ */
+ public void setResources(ResourcesInfo resources) {
+ this.resources = resources;
+ }
+
+ /**
+ * Sets the list of resource type elements.
+ *
+ * @param resourceTypes
+ * The list of resource type elements.
+ */
+ public void setResourceTypes(List<ResourceTypeInfo> resourceTypes) {
+ this.resourceTypes = resourceTypes;
+ }
+
+ /**
+ * Sets the version of the Application.
+ *
+ * @param version
+ * The version of the Application.
+ */
+ public void setVersion(String version) {
+ this.version = version;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/DocumentationInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/DocumentationInfo.java
new file mode 100644
index 0000000000..0b0ec2aa0b
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/DocumentationInfo.java
@@ -0,0 +1,129 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import org.restlet.data.Language;
+
+/**
+ * Documents APISpark description elements.
+ *
+ * @author Jerome Louvel
+ */
+public class DocumentationInfo {
+
+ /** The language of that documentation element. */
+ private Language language;
+
+ /** The content as a String. */
+ private String textContent;
+
+ /** The title of that documentation element. */
+ private String title;
+
+ /**
+ * Constructor.
+ */
+ public DocumentationInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with text content.
+ *
+ * @param textContent
+ * The text content.
+ */
+ public DocumentationInfo(String textContent) {
+ super();
+ setTextContent(textContent);
+ }
+
+ /**
+ * Returns the language of that documentation element.
+ *
+ * @return The language of this documentation element.
+ */
+ public Language getLanguage() {
+ return this.language;
+ }
+
+ /**
+ * Returns the content of that element as text.
+ *
+ * @return The content of that element as text.
+ */
+ public String getTextContent() {
+ return this.textContent;
+ }
+
+ /**
+ * Returns the title of that documentation element.
+ *
+ * @return The title of that documentation element.
+ */
+ public String getTitle() {
+ return this.title;
+ }
+
+ /**
+ * The language of that documentation element.
+ *
+ * @param language
+ * The language of that documentation element.
+ */
+ public void setLanguage(Language language) {
+ this.language = language;
+ }
+
+ /**
+ * Sets the content of that element as text.
+ *
+ * @param textContent
+ * The content of that element as text.
+ */
+ public void setTextContent(String textContent) {
+ this.textContent = textContent;
+ }
+
+ /**
+ * Sets the title of that documentation element.
+ *
+ * @param title
+ * The title of that documentation element.
+ */
+ public void setTitle(String title) {
+ this.title = title;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/DocumentedInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/DocumentedInfo.java
new file mode 100644
index 0000000000..1663f701fd
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/DocumentedInfo.java
@@ -0,0 +1,136 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Superclass of APISpark elements that support documentation.
+ *
+ */
+public abstract class DocumentedInfo {
+ /** Doc elements used to document that element. */
+ private List<DocumentationInfo> documentations;
+
+ /**
+ * Constructor.
+ */
+ public DocumentedInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public DocumentedInfo(DocumentationInfo documentation) {
+ super();
+ getDocumentations().add(documentation);
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public DocumentedInfo(List<DocumentationInfo> documentations) {
+ super();
+ this.documentations = documentations;
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public DocumentedInfo(String documentation) {
+ this(new DocumentationInfo(documentation));
+ }
+
+ /**
+ * Returns the list of documentation elements.
+ *
+ * @return The list of documentation elements.
+ */
+ public List<DocumentationInfo> getDocumentations() {
+ // Lazy initialization with double-check.
+ List<DocumentationInfo> d = this.documentations;
+ if (d == null) {
+ synchronized (this) {
+ d = this.documentations;
+ if (d == null) {
+ this.documentations = d = new ArrayList<DocumentationInfo>();
+ }
+ }
+ }
+ return d;
+ }
+
+ /**
+ * Set the list of documentation elements with a single element.
+ *
+ * @param documentationInfo
+ * A single documentation element.
+ */
+ public void setDocumentation(DocumentationInfo documentationInfo) {
+ getDocumentations().clear();
+ getDocumentations().add(documentationInfo);
+ }
+
+ /**
+ * Set the list of documentation elements with a single element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public void setDocumentation(String documentation) {
+ getDocumentations().clear();
+ getDocumentations().add(new DocumentationInfo(documentation));
+ }
+
+ /**
+ * Sets the list of documentation elements.
+ *
+ * @param doc
+ * The list of documentation elements.
+ */
+ public void setDocumentations(List<DocumentationInfo> doc) {
+ this.documentations = doc;
+ }
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/LinkInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/LinkInfo.java
new file mode 100644
index 0000000000..d38a1960c0
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/LinkInfo.java
@@ -0,0 +1,157 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.List;
+
+import org.restlet.data.Reference;
+
+/**
+ * Allows description of links between representations and resources.
+ *
+ * @author Jerome Louvel
+ */
+public class LinkInfo extends DocumentedInfo {
+ /**
+ * Identifies the relationship of the resource identified by the link to the
+ * resource whose representation the link is embedded in.
+ */
+ private String relationship;
+
+ /**
+ * Defines the capabilities of the resource that the link identifies.
+ */
+ private Reference resourceType;
+
+ /**
+ * Identifies the relationship of the resource whose representation the link
+ * is embedded in to the resource identified by the link.
+ */
+ private String reverseRelationship;
+
+ /**
+ * Constructor.
+ */
+ public LinkInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public LinkInfo(DocumentationInfo documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public LinkInfo(List<DocumentationInfo> documentations) {
+ super(documentations);
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public LinkInfo(String documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Returns the relationship attribute value.
+ *
+ * @return The relationship attribute value.
+ */
+ public String getRelationship() {
+ return this.relationship;
+ }
+
+ /**
+ * Returns the reference to the resource type of the linked resource.
+ *
+ * @return The reference to the resource type of the linked resource.
+ */
+ public Reference getResourceType() {
+ return this.resourceType;
+ }
+
+ /**
+ * Returns the reverse relationship attribute value.
+ *
+ * @return The reverse relationship attribute value.
+ */
+ public String getReverseRelationship() {
+ return this.reverseRelationship;
+ }
+
+ /**
+ * Sets the relationship attribute value.
+ *
+ * @param relationship
+ * The relationship attribute value.
+ */
+ public void setRelationship(String relationship) {
+ this.relationship = relationship;
+ }
+
+ /**
+ * Sets the reference to the resource type of the linked resource.
+ *
+ * @param resourceType
+ * The reference to the resource type of the linked resource.
+ */
+ public void setResourceType(Reference resourceType) {
+ this.resourceType = resourceType;
+ }
+
+ /**
+ * Sets the reverse relationship attribute value.
+ *
+ * @param reverseRelationship
+ * The reverse relationship attribute value.
+ */
+ public void setReverseRelationship(String reverseRelationship) {
+ this.reverseRelationship = reverseRelationship;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/MethodInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/MethodInfo.java
new file mode 100644
index 0000000000..efaed5a253
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/MethodInfo.java
@@ -0,0 +1,328 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.restlet.data.Method;
+import org.restlet.data.Reference;
+import org.restlet.engine.resource.AnnotationInfo;
+import org.restlet.engine.resource.AnnotationUtils;
+import org.restlet.representation.Variant;
+import org.restlet.resource.ResourceException;
+import org.restlet.resource.ServerResource;
+import org.restlet.service.MetadataService;
+
+/**
+ * Describes the expected requests and responses of a resource method.
+ *
+ * @author Jerome Louvel
+ */
+public class MethodInfo extends DocumentedInfo {
+
+ /**
+ * Automatically describe a method by discovering the resource's
+ * annotations.
+ *
+ * @param info
+ * The method description to update.
+ * @param resource
+ * The server resource to describe.
+ */
+ public static void describeAnnotations(MethodInfo info,
+ ServerResource resource) {
+ // Loop over the annotated Java methods
+ MetadataService metadataService = resource.getMetadataService();
+ List<AnnotationInfo> annotations = resource.isAnnotated() ? AnnotationUtils
+ .getInstance().getAnnotations(resource.getClass()) : null;
+
+ if (annotations != null && metadataService != null) {
+ for (AnnotationInfo annotationInfo : annotations) {
+ try {
+ if (info.getName()
+ .equals(annotationInfo.getRestletMethod())) {
+ // Describe the request
+ Class<?>[] classes = annotationInfo.getJavaInputTypes();
+
+ List<Variant> requestVariants = annotationInfo
+ .getRequestVariants(
+ resource.getMetadataService(),
+ resource.getConverterService());
+
+ if (requestVariants != null) {
+ for (Variant variant : requestVariants) {
+ if ((variant.getMediaType() != null)
+ && ((info.getRequest() == null) || !info
+ .getRequest()
+ .getRepresentations()
+ .contains(variant))) {
+ RepresentationInfo representationInfo = null;
+
+ if (info.getRequest() == null) {
+ info.setRequest(new RequestInfo());
+ }
+
+ if (resource instanceof ApisparkServerResource) {
+ representationInfo = ((ApisparkServerResource) resource)
+ .describe(info,
+ info.getRequest(),
+ classes[0], variant);
+ } else {
+ representationInfo = new RepresentationInfo(
+ variant);
+ }
+
+ info.getRequest().getRepresentations()
+ .add(representationInfo);
+ }
+ }
+ }
+
+ // Describe the response
+ Class<?> outputClass = annotationInfo
+ .getJavaOutputType();
+
+ if (outputClass != null) {
+ List<Variant> responseVariants = annotationInfo
+ .getResponseVariants(
+ resource.getMetadataService(),
+ resource.getConverterService());
+
+ if (responseVariants != null) {
+ for (Variant variant : responseVariants) {
+ if ((variant.getMediaType() != null)
+ && !info.getResponse()
+ .getRepresentations()
+ .contains(variant)) {
+ RepresentationInfo representationInfo = null;
+
+ if (resource instanceof ApisparkServerResource) {
+ representationInfo = ((ApisparkServerResource) resource)
+ .describe(info,
+ info.getResponse(),
+ outputClass,
+ variant);
+ } else {
+ representationInfo = new RepresentationInfo(
+ variant);
+ }
+
+ info.getResponse().getRepresentations()
+ .add(representationInfo);
+ }
+ }
+ }
+ }
+ }
+ } catch (IOException e) {
+ throw new ResourceException(e);
+ }
+ }
+ }
+ }
+
+ /** Identifier for the method. */
+ private String identifier;
+
+ /** Name of the method. */
+ private Method name;
+
+ /** Describes the input to the method. */
+ private RequestInfo request;
+
+ /** Describes the output of the method. */
+ private List<ResponseInfo> responses;
+
+ /** Reference to a method definition element. */
+ private Reference targetRef;
+
+ /**
+ * Constructor.
+ */
+ public MethodInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public MethodInfo(DocumentationInfo documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public MethodInfo(List<DocumentationInfo> documentations) {
+ super(documentations);
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public MethodInfo(String documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Returns the identifier for the method.
+ *
+ * @return The identifier for the method.
+ */
+ public String getIdentifier() {
+ return this.identifier;
+ }
+
+ /**
+ * Returns the name of the method.
+ *
+ * @return The name of the method.
+ */
+
+ public Method getName() {
+ return this.name;
+ }
+
+ /**
+ * Returns the input to the method.
+ *
+ * @return The input to the method.
+ */
+ public RequestInfo getRequest() {
+ return this.request;
+ }
+
+ /**
+ * Returns the last added response of the method.
+ *
+ * @return The last added response of the method.
+ */
+ public ResponseInfo getResponse() {
+ if (getResponses().isEmpty()) {
+ getResponses().add(new ResponseInfo());
+ }
+
+ return getResponses().get(getResponses().size() - 1);
+ }
+
+ /**
+ * Returns the output of the method.
+ *
+ * @return The output of the method.
+ */
+ public List<ResponseInfo> getResponses() {
+ // Lazy initialization with double-check.
+ List<ResponseInfo> r = this.responses;
+ if (r == null) {
+ synchronized (this) {
+ r = this.responses;
+ if (r == null) {
+ this.responses = r = new ArrayList<ResponseInfo>();
+ }
+ }
+ }
+ return r;
+ }
+
+ /**
+ * Returns the reference to a method definition element.
+ *
+ * @return The reference to a method definition element.
+ */
+ public Reference getTargetRef() {
+ return this.targetRef;
+ }
+
+ /**
+ * Sets the identifier for the method.
+ *
+ * @param identifier
+ * The identifier for the method.
+ */
+ public void setIdentifier(String identifier) {
+ this.identifier = identifier;
+ }
+
+ /**
+ * Sets the name of the method.
+ *
+ * @param name
+ * The name of the method.
+ */
+ public void setName(Method name) {
+ this.name = name;
+ }
+
+ /**
+ * Sets the input to the method.
+ *
+ * @param request
+ * The input to the method.
+ */
+ public void setRequest(RequestInfo request) {
+ this.request = request;
+ }
+
+ /**
+ * Sets the output of the method.
+ *
+ * @param responses
+ * The output of the method.
+ */
+ public void setResponses(List<ResponseInfo> responses) {
+ this.responses = responses;
+ }
+
+ /**
+ * Sets the reference to a method definition element.
+ *
+ * @param targetRef
+ * The reference to a method definition element.
+ */
+ public void setTargetRef(Reference targetRef) {
+ this.targetRef = targetRef;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/OptionInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/OptionInfo.java
new file mode 100644
index 0000000000..8d5aa0d309
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/OptionInfo.java
@@ -0,0 +1,104 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.List;
+
+/**
+ * Defines a potential value for a parent parameter description.
+ *
+ * @author Jerome Louvel
+ */
+public class OptionInfo extends DocumentedInfo {
+
+ /** Value of this option element. */
+ private String value;
+
+ /**
+ * Constructor.
+ */
+ public OptionInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public OptionInfo(DocumentationInfo documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public OptionInfo(List<DocumentationInfo> documentations) {
+ super(documentations);
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public OptionInfo(String documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Returns the value of this option element.
+ *
+ * @return The value of this option element.
+ */
+ public String getValue() {
+ return this.value;
+ }
+
+ /**
+ * Sets the value of this option element.
+ *
+ * @param value
+ * The value of this option element.
+ */
+ public void setValue(String value) {
+ this.value = value;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ParameterInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ParameterInfo.java
new file mode 100644
index 0000000000..c9137da42c
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ParameterInfo.java
@@ -0,0 +1,402 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Describes a parameterized aspect of a parent {@link ResourceInfo},
+ * {@link RequestInfo}, {@link ResponseInfo} or {@link RepresentationInfo}
+ * element.
+ *
+ * @author Jerome Louvel
+ */
+public class ParameterInfo extends DocumentedInfo {
+
+ /** Default value of this parameter. */
+ private String defaultValue;
+
+ /** Provides a fixed value for the parameter. */
+ private String fixed;
+
+ /** Identifier of this parameter element. */
+ private String identifier;
+
+ /** Link element. */
+ private LinkInfo link;
+
+ /** Name of this element. */
+ private String name;
+
+ /** List of option elements for that element. */
+ private List<OptionInfo> options;
+
+ /**
+ * Path to the value of this parameter (within a parent representation).
+ */
+ private String path;
+
+ /**
+ * Indicates whether the parameter is single valued or may have multiple
+ * values.
+ */
+ private boolean repeating;
+
+ /**
+ * Indicates whether the parameter is required.
+ */
+ private boolean required;
+
+ /** Parameter style. */
+ private ParameterStyle style;
+
+ /** Parameter type. */
+ private String type;
+
+ /**
+ * Constructor.
+ */
+ public ParameterInfo() {
+ super();
+ }
+
+ /**
+ * Constructor.
+ *
+ * @param name
+ * The name of the parameter.
+ * @param required
+     *            True if this parameter is required.
+ * @param type
+ * The type of the parameter.
+ * @param style
+ * The style of the parameter.
+ * @param documentation
+ * A single documentation element.
+ */
+ public ParameterInfo(String name, boolean required, String type,
+ ParameterStyle style, String documentation) {
+ super(documentation);
+ this.name = name;
+ this.required = required;
+ this.style = style;
+ this.type = type;
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param name
+ * The required name of the parameter.
+ * @param style
+ * The required style of the parameter.
+ * @param documentation
+ * A single documentation element.
+ */
+ public ParameterInfo(String name, ParameterStyle style,
+ DocumentationInfo documentation) {
+ super(documentation);
+ this.name = name;
+ this.style = style;
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param name
+ * The required name of the parameter.
+ * @param style
+ * The required style of the parameter.
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public ParameterInfo(String name, ParameterStyle style,
+ List<DocumentationInfo> documentations) {
+ super(documentations);
+ this.name = name;
+ this.style = style;
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param name
+ * The required name of the parameter.
+ * @param style
+ * The required style of the parameter.
+ * @param documentation
+ * A single documentation element.
+ */
+ public ParameterInfo(String name, ParameterStyle style, String documentation) {
+ super(documentation);
+ this.name = name;
+ this.style = style;
+ }
+
+ /**
+ * Returns the default value of this parameter.
+ *
+ * @return The default value of this parameter.
+ */
+ public String getDefaultValue() {
+ return this.defaultValue;
+ }
+
+ /**
+ * Returns the fixed value for the parameter.
+ *
+ * @return The fixed value for the parameter.
+ */
+ public String getFixed() {
+ return this.fixed;
+ }
+
+ /**
+ * Returns the identifier of this parameter element.
+ *
+ * @return The identifier of this parameter element.
+ */
+
+ public String getIdentifier() {
+ return this.identifier;
+ }
+
+ /**
+ * Returns the link element.
+ *
+ * @return The link element.
+ */
+
+ public LinkInfo getLink() {
+ return this.link;
+ }
+
+ /**
+ * Returns the name of this element.
+ *
+ * @return The name of this element.
+ */
+
+ public String getName() {
+ return this.name;
+ }
+
+ /**
+ * Returns the list of option elements for that element.
+ *
+ * @return The list of option elements for that element.
+ */
+
+ public List<OptionInfo> getOptions() {
+ // Lazy initialization with double-check.
+ List<OptionInfo> o = this.options;
+ if (o == null) {
+ synchronized (this) {
+ o = this.options;
+ if (o == null) {
+ this.options = o = new ArrayList<OptionInfo>();
+ }
+ }
+ }
+ return o;
+ }
+
+ /**
+ * Returns the path to the value of this parameter (within a parent
+ * representation).
+ *
+ * @return The path to the value of this parameter (within a parent
+ * representation).
+ */
+
+ public String getPath() {
+ return this.path;
+ }
+
+ /**
+ * Returns the parameter style.
+ *
+ * @return The parameter style.
+ */
+
+ public ParameterStyle getStyle() {
+ return this.style;
+ }
+
+ /**
+ * Returns the parameter type.
+ *
+ * @return The parameter type.
+ */
+ public String getType() {
+ return this.type;
+ }
+
+ /**
+     * Returns true if the parameter may have multiple values, false if it
+     * is single valued.
+     * 
+     * @return True if the parameter may have multiple values, false if it
+     *         is single valued.
+     */
+
+ public boolean isRepeating() {
+ return this.repeating;
+ }
+
+ /**
+ * Indicates whether the parameter is required.
+ *
+ * @return True if the parameter is required, false otherwise.
+ */
+ public boolean isRequired() {
+ return this.required;
+ }
+
+ /**
+ * Sets the default value of this parameter.
+ *
+ * @param defaultValue
+ * The default value of this parameter.
+ */
+ public void setDefaultValue(String defaultValue) {
+ this.defaultValue = defaultValue;
+ }
+
+ /**
+ * Sets the fixed value for the parameter.
+ *
+ * @param fixed
+ * The fixed value for the parameter.
+ */
+ public void setFixed(String fixed) {
+ this.fixed = fixed;
+ }
+
+ /**
+ * Sets the identifier of this parameter element.
+ *
+ * @param identifier
+ * The identifier of this parameter element.
+ */
+ public void setIdentifier(String identifier) {
+ this.identifier = identifier;
+ }
+
+ /**
+ * Sets the link element.
+ *
+ * @param link
+ * The link element.
+ */
+ public void setLink(LinkInfo link) {
+ this.link = link;
+ }
+
+ /**
+ * Sets the name of this element.
+ *
+ * @param name
+ * The name of this element.
+ */
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ /**
+ * Sets the list of option elements for that element.
+ *
+ * @param options
+ * The list of option elements for that element.
+ */
+ public void setOptions(List<OptionInfo> options) {
+ this.options = options;
+ }
+
+ /**
+ * Sets the path to the value of this parameter (within a parent
+ * representation).
+ *
+ * @param path
+ * The path to the value of this parameter (within a parent
+ * representation).
+ */
+ public void setPath(String path) {
+ this.path = path;
+ }
+
+ /**
+ * Indicates whether the parameter is single valued or may have multiple
+ * values.
+ *
+     * @param repeating
+     *            True if the parameter may have multiple values, false if
+     *            it is single valued.
+ */
+ public void setRepeating(boolean repeating) {
+ this.repeating = repeating;
+ }
+
+ /**
+ * Indicates whether the parameter is required.
+ *
+ * @param required
+ * True if the parameter is required, false otherwise.
+ */
+ public void setRequired(boolean required) {
+ this.required = required;
+ }
+
+ /**
+ * Sets the parameter style.
+ *
+ * @param style
+ * The parameter style.
+ */
+ public void setStyle(ParameterStyle style) {
+ this.style = style;
+ }
+
+ /**
+ * Sets the parameter type.
+ *
+ * @param type
+ * The parameter type.
+ */
+ public void setType(String type) {
+ this.type = type;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ParameterStyle.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ParameterStyle.java
new file mode 100644
index 0000000000..6ee694a94b
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ParameterStyle.java
@@ -0,0 +1,63 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+/**
+ * Enumerates the supported styles of parameters.
+ *
+ * @author Jerome Louvel
+ */
+public enum ParameterStyle {
+
+ HEADER, MATRIX, PLAIN, QUERY, TEMPLATE;
+
+ @Override
+ public String toString() {
+ String result = null;
+ if (equals(HEADER)) {
+ result = "header";
+ } else if (equals(MATRIX)) {
+ result = "matrix";
+ } else if (equals(PLAIN)) {
+ result = "plain";
+ } else if (equals(QUERY)) {
+ result = "query";
+ } else if (equals(TEMPLATE)) {
+ result = "template";
+ }
+
+ return result;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/RepresentationInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/RepresentationInfo.java
new file mode 100644
index 0000000000..4d85e285ea
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/RepresentationInfo.java
@@ -0,0 +1,237 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.restlet.data.MediaType;
+import org.restlet.data.Reference;
+import org.restlet.representation.Variant;
+
+/**
+ * Describes a variant representation for a target resource.
+ *
+ * @author Jerome Louvel
+ */
+public class RepresentationInfo extends DocumentedInfo {
+
+ /** Identifier for that element. */
+ private String identifier;
+
+ /** Media type of that element. */
+ private MediaType mediaType;
+
+ /** List of parameters. */
+ private List<ParameterInfo> parameters;
+
+ /** List of locations of one or more meta data profiles. */
+ private List<Reference> profiles;
+
+ /** Reference to an representation identifier. */
+ private String reference;
+
+ /**
+ * Constructor.
+ */
+ public RepresentationInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public RepresentationInfo(DocumentationInfo documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public RepresentationInfo(List<DocumentationInfo> documentations) {
+ super(documentations);
+ }
+
+ /**
+ * Constructor with a media type.
+ *
+ * @param mediaType
+ * The media type of the representation.
+ */
+ public RepresentationInfo(MediaType mediaType) {
+ setMediaType(mediaType);
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public RepresentationInfo(String documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Constructor with a variant.
+ *
+ * @param variant
+ * The variant to describe.
+ */
+ public RepresentationInfo(Variant variant) {
+ setMediaType(variant.getMediaType());
+ }
+
+ /**
+ * Returns the identifier for that element.
+ *
+ * @return The identifier for that element.
+ */
+ public String getIdentifier() {
+ return this.identifier;
+ }
+
+ /**
+ * Returns the media type of that element.
+ *
+ * @return The media type of that element.
+ */
+ public MediaType getMediaType() {
+ return this.mediaType;
+ }
+
+ /**
+ * Returns the list of parameters.
+ *
+ * @return The list of parameters.
+ */
+ public List<ParameterInfo> getParameters() {
+ // Lazy initialization with double-check.
+ List<ParameterInfo> p = this.parameters;
+ if (p == null) {
+ synchronized (this) {
+ p = this.parameters;
+ if (p == null) {
+ this.parameters = p = new ArrayList<ParameterInfo>();
+ }
+ }
+ }
+ return p;
+ }
+
+ /**
+ * Returns the list of locations of one or more meta data profiles.
+ *
+ * @return The list of locations of one or more meta data profiles.
+ */
+ public List<Reference> getProfiles() {
+ // Lazy initialization with double-check.
+ List<Reference> p = this.profiles;
+ if (p == null) {
+ synchronized (this) {
+ p = this.profiles;
+ if (p == null) {
+ this.profiles = p = new ArrayList<Reference>();
+ }
+ }
+ }
+ return p;
+ }
+
+ /**
+ * Returns the reference to an representation identifier.
+ *
+ * @return The reference to an representation identifier.
+ */
+ public String getReference() {
+ return reference;
+ }
+
+ /**
+ * Sets the identifier for that element.
+ *
+ * @param identifier
+ * The identifier for that element.
+ */
+ public void setIdentifier(String identifier) {
+ this.identifier = identifier;
+ }
+
+ /**
+ * Sets the media type of that element.
+ *
+ * @param mediaType
+ * The media type of that element.
+ */
+ public void setMediaType(MediaType mediaType) {
+ this.mediaType = mediaType;
+ }
+
+ /**
+ * Sets the list of parameters.
+ *
+ * @param parameters
+ * The list of parameters.
+ */
+ public void setParameters(List<ParameterInfo> parameters) {
+ this.parameters = parameters;
+ }
+
+ /**
+ * Sets the list of locations of one or more meta data profiles.
+ *
+ * @param profiles
+ * The list of locations of one or more meta data profiles.
+ */
+ public void setProfiles(List<Reference> profiles) {
+ this.profiles = profiles;
+ }
+
+ /**
+ * Sets the reference to an representation identifier.
+ *
+ * @param reference
+ * The reference to an representation identifier.
+ */
+ public void setReference(String reference) {
+ this.reference = reference;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/RequestInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/RequestInfo.java
new file mode 100644
index 0000000000..80070d771d
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/RequestInfo.java
@@ -0,0 +1,147 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Describes the properties of a request associated to a parent method.
+ *
+ * @author Jerome Louvel
+ */
+public class RequestInfo extends DocumentedInfo {
+
+ /** List of parameters. */
+ private List<ParameterInfo> parameters;
+
+ /** List of supported input representations. */
+ private List<RepresentationInfo> representations;
+
+ /**
+ * Constructor.
+ */
+ public RequestInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public RequestInfo(DocumentationInfo documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public RequestInfo(List<DocumentationInfo> documentations) {
+ super(documentations);
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public RequestInfo(String documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Returns the list of parameters.
+ *
+ * @return The list of parameters.
+ */
+ public List<ParameterInfo> getParameters() {
+ // Lazy initialization with double-check.
+ List<ParameterInfo> p = this.parameters;
+ if (p == null) {
+ synchronized (this) {
+ p = this.parameters;
+ if (p == null) {
+ this.parameters = p = new ArrayList<ParameterInfo>();
+ }
+ }
+ }
+ return p;
+ }
+
+ /**
+ * Returns the list of supported input representations.
+ *
+ * @return The list of supported input representations.
+ */
+ public List<RepresentationInfo> getRepresentations() {
+ // Lazy initialization with double-check.
+ List<RepresentationInfo> r = this.representations;
+ if (r == null) {
+ synchronized (this) {
+ r = this.representations;
+ if (r == null) {
+ this.representations = r = new ArrayList<RepresentationInfo>();
+ }
+ }
+ }
+ return r;
+ }
+
+ /**
+ * Sets the list of parameters.
+ *
+ * @param parameters
+ * The list of parameters.
+ */
+ public void setParameters(List<ParameterInfo> parameters) {
+ this.parameters = parameters;
+ }
+
+ /**
+ * Sets the list of supported input representations.
+ *
+ * @param representations
+ * The list of supported input representations.
+ */
+ public void setRepresentations(List<RepresentationInfo> representations) {
+ this.representations = representations;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResourceInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResourceInfo.java
new file mode 100644
index 0000000000..92bb520674
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResourceInfo.java
@@ -0,0 +1,412 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.restlet.data.MediaType;
+import org.restlet.data.Method;
+import org.restlet.data.Reference;
+import org.restlet.resource.Directory;
+import org.restlet.resource.ServerResource;
+
+/**
+ * Describes a class of closely related resources.
+ *
+ * @author Jerome Louvel
+ */
+public class ResourceInfo extends DocumentedInfo {
+
+ /**
+ * Returns a APISpark description of the current resource.
+ *
+ * @param applicationInfo
+ * The parent application.
+ * @param resource
+ * The resource to describe.
+ * @param path
+ * Path of the current resource.
+ * @param info
+ * APISpark description of the current resource to update.
+ */
+ public static void describe(ApplicationInfo applicationInfo,
+ ResourceInfo info, Object resource, String path) {
+ if ((path != null) && path.startsWith("/")) {
+ path = path.substring(1);
+ }
+
+ info.setPath(path);
+
+ // Introspect the current resource to detect the allowed methods
+ List<Method> methodsList = new ArrayList<Method>();
+
+ if (resource instanceof ServerResource) {
+ ((ServerResource) resource).updateAllowedMethods();
+ methodsList.addAll(((ServerResource) resource).getAllowedMethods());
+
+ if (resource instanceof ApisparkServerResource) {
+ info.setParameters(((ApisparkServerResource) resource)
+ .describeParameters());
+
+ if (applicationInfo != null) {
+ ((ApisparkServerResource) resource)
+ .describe(applicationInfo);
+ }
+ }
+ } else if (resource instanceof Directory) {
+ Directory directory = (Directory) resource;
+ methodsList.add(Method.GET);
+
+ if (directory.isModifiable()) {
+ methodsList.add(Method.DELETE);
+ methodsList.add(Method.PUT);
+ }
+ }
+
+ Method.sort(methodsList);
+
+ // Update the resource info with the description of the allowed methods
+ List<MethodInfo> methods = info.getMethods();
+ MethodInfo methodInfo;
+
+ for (Method method : methodsList) {
+ methodInfo = new MethodInfo();
+ methods.add(methodInfo);
+ methodInfo.setName(method);
+
+ if (resource instanceof ServerResource) {
+ if (resource instanceof ApisparkServerResource) {
+ ApisparkServerResource wsResource = (ApisparkServerResource) resource;
+
+ if (wsResource.canDescribe(method)) {
+ wsResource.describeMethod(method, methodInfo);
+ }
+ } else {
+ MethodInfo.describeAnnotations(methodInfo,
+ (ServerResource) resource);
+ }
+ }
+ }
+
+ // Document the resource
+ String title = null;
+ String textContent = null;
+
+ if (resource instanceof ApisparkServerResource) {
+ title = ((ApisparkServerResource) resource).getName();
+ textContent = ((ApisparkServerResource) resource).getDescription();
+ }
+
+ if ((title != null) && !"".equals(title)) {
+ DocumentationInfo doc = null;
+
+ if (info.getDocumentations().isEmpty()) {
+ doc = new DocumentationInfo();
+ info.getDocumentations().add(doc);
+ } else {
+ info.getDocumentations().get(0);
+ }
+
+ doc.setTitle(title);
+ doc.setTextContent(textContent);
+ }
+ }
+
+ /** List of child resources. */
+ private List<ResourceInfo> childResources;
+
+ /** Identifier for that element. */
+ private String identifier;
+
+ /** List of supported methods. */
+ private List<MethodInfo> methods;
+
+ /** List of parameters. */
+ private List<ParameterInfo> parameters;
+
+ /** URI template for the identifier of the resource. */
+ private String path;
+
+ /** Media type for the query component of the resource URI. */
+ private MediaType queryType;
+
+ /** List of references to resource type elements. */
+ private List<Reference> type;
+
+ /**
+ * Constructor.
+ */
+ public ResourceInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public ResourceInfo(DocumentationInfo documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public ResourceInfo(List<DocumentationInfo> documentations) {
+ super(documentations);
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public ResourceInfo(String documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Creates an application descriptor that wraps this resource descriptor.
+ * The title of the resource, that is to say the title of its first
+ * documentation tag is transfered to the title of the first documentation
+ * tag of the main application tag.
+ *
+ * @return The new application descriptor.
+ */
+ public ApplicationInfo createApplication() {
+ ApplicationInfo result = new ApplicationInfo();
+
+ if (!getDocumentations().isEmpty()) {
+ String titleResource = getDocumentations().get(0).getTitle();
+ if (titleResource != null && !"".equals(titleResource)) {
+ DocumentationInfo doc = null;
+
+ if (result.getDocumentations().isEmpty()) {
+ doc = new DocumentationInfo();
+ result.getDocumentations().add(doc);
+ } else {
+ doc = result.getDocumentations().get(0);
+ }
+
+ doc.setTitle(titleResource);
+ }
+ }
+
+ ResourcesInfo resources = new ResourcesInfo();
+ result.setResources(resources);
+ resources.getResources().add(this);
+ return result;
+ }
+
+ /**
+ * Returns the list of child resources.
+ *
+ * @return The list of child resources.
+ */
+ public List<ResourceInfo> getChildResources() {
+ // Lazy initialization with double-check.
+ List<ResourceInfo> r = this.childResources;
+ if (r == null) {
+ synchronized (this) {
+ r = this.childResources;
+ if (r == null) {
+ this.childResources = r = new ArrayList<ResourceInfo>();
+ }
+ }
+ }
+ return r;
+ }
+
+ /**
+ * Returns the identifier for that element.
+ *
+ * @return The identifier for that element.
+ */
+ public String getIdentifier() {
+ return this.identifier;
+ }
+
+ /**
+ * Returns the list of supported methods.
+ *
+ * @return The list of supported methods.
+ */
+ public List<MethodInfo> getMethods() {
+ // Lazy initialization with double-check.
+ List<MethodInfo> m = this.methods;
+ if (m == null) {
+ synchronized (this) {
+ m = this.methods;
+
+ if (m == null) {
+ this.methods = m = new ArrayList<MethodInfo>();
+ }
+ }
+ }
+ return m;
+ }
+
+ /**
+ * Returns the list of parameters.
+ *
+ * @return The list of parameters.
+ */
+ public List<ParameterInfo> getParameters() {
+ // Lazy initialization with double-check.
+ List<ParameterInfo> p = this.parameters;
+ if (p == null) {
+ synchronized (this) {
+ p = this.parameters;
+ if (p == null) {
+ this.parameters = p = new ArrayList<ParameterInfo>();
+ }
+ }
+ }
+ return p;
+ }
+
+ /**
+ * Returns the URI template for the identifier of the resource.
+ *
+ * @return The URI template for the identifier of the resource.
+ */
+ public String getPath() {
+ return this.path;
+ }
+
+ /**
+ * Returns the media type for the query component of the resource URI.
+ *
+ * @return The media type for the query component of the resource URI.
+ */
+ public MediaType getQueryType() {
+ return this.queryType;
+ }
+
+ /**
+ * Returns the list of references to resource type elements.
+ *
+ * @return The list of references to resource type elements.
+ */
+ public List<Reference> getType() {
+ // Lazy initialization with double-check.
+ List<Reference> t = this.type;
+ if (t == null) {
+ synchronized (this) {
+ t = this.type;
+ if (t == null) {
+ this.type = t = new ArrayList<Reference>();
+ }
+ }
+ }
+ return t;
+ }
+
+ /**
+ * Sets the list of child resources.
+ *
+ * @param resources
+ * The list of child resources.
+ */
+ public void setChildResources(List<ResourceInfo> resources) {
+ this.childResources = resources;
+ }
+
+ /**
+ * Sets the identifier for that element.
+ *
+ * @param identifier
+ * The identifier for that element.
+ */
+ public void setIdentifier(String identifier) {
+ this.identifier = identifier;
+ }
+
+ /**
+ * Sets the list of supported methods.
+ *
+ * @param methods
+ * The list of supported methods.
+ */
+ public void setMethods(List<MethodInfo> methods) {
+ this.methods = methods;
+ }
+
+ /**
+ * Sets the list of parameters.
+ *
+ * @param parameters
+ * The list of parameters.
+ */
+ public void setParameters(List<ParameterInfo> parameters) {
+ this.parameters = parameters;
+ }
+
+ /**
+ * Sets the URI template for the identifier of the resource.
+ *
+ * @param path
+ * The URI template for the identifier of the resource.
+ */
+ public void setPath(String path) {
+ this.path = path;
+ }
+
+ /**
+ * Sets the media type for the query component of the resource URI.
+ *
+ * @param queryType
+ * The media type for the query component of the resource URI.
+ */
+ public void setQueryType(MediaType queryType) {
+ this.queryType = queryType;
+ }
+
+ /**
+ * Sets the list of references to resource type elements.
+ *
+ * @param type
+ * The list of references to resource type elements.
+ */
+ public void setType(List<Reference> type) {
+ this.type = type;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResourceTypeInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResourceTypeInfo.java
new file mode 100644
index 0000000000..38c7113203
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResourceTypeInfo.java
@@ -0,0 +1,169 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Describes a reusable type of resources.
+ *
+ * @author Jerome Louvel
+ */
+public class ResourceTypeInfo extends DocumentedInfo {
+
+ /** Identifier for that element. */
+ private String identifier;
+
+ /** List of supported methods. */
+ private List<MethodInfo> methods;
+
+ /** List of parameters. */
+ private List<ParameterInfo> parameters;
+
+ /**
+ * Constructor.
+ */
+ public ResourceTypeInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public ResourceTypeInfo(DocumentationInfo documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public ResourceTypeInfo(List<DocumentationInfo> documentations) {
+ super(documentations);
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public ResourceTypeInfo(String documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Returns the identifier for that element.
+ *
+ * @return The identifier for that element.
+ */
+ public String getIdentifier() {
+ return this.identifier;
+ }
+
+ /**
+ * Returns the list of supported methods.
+ *
+ * @return The list of supported methods.
+ */
+ public List<MethodInfo> getMethods() {
+ // Lazy initialization with double-check.
+ List<MethodInfo> m = this.methods;
+ if (m == null) {
+ synchronized (this) {
+ m = this.methods;
+ if (m == null) {
+ this.methods = m = new ArrayList<MethodInfo>();
+ }
+ }
+ }
+ return m;
+ }
+
+ /**
+ * Returns the list of parameters.
+ *
+ * @return The list of parameters.
+ */
+ public List<ParameterInfo> getParameters() {
+ // Lazy initialization with double-check.
+ List<ParameterInfo> p = this.parameters;
+ if (p == null) {
+ synchronized (this) {
+ p = this.parameters;
+ if (p == null) {
+ this.parameters = p = new ArrayList<ParameterInfo>();
+ }
+ }
+ }
+ return p;
+ }
+
+ /**
+ * Sets the identifier for that element.
+ *
+ * @param identifier
+ * The identifier for that element.
+ */
+ public void setIdentifier(String identifier) {
+ this.identifier = identifier;
+ }
+
+ /**
+ * Sets the list of supported methods.
+ *
+ * @param methods
+ * The list of supported methods.
+ */
+ public void setMethods(List<MethodInfo> methods) {
+ this.methods = methods;
+ }
+
+ /**
+ * Sets the list of parameters.
+ *
+ * @param parameters
+ * The list of parameters.
+ */
+ public void setParameters(List<ParameterInfo> parameters) {
+ this.parameters = parameters;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResourcesInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResourcesInfo.java
new file mode 100644
index 0000000000..0274dcd5e7
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResourcesInfo.java
@@ -0,0 +1,138 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.restlet.data.Reference;
+
+/**
+ * Describes the root resources of an application.
+ *
+ * @author Jerome Louvel
+ */
+public class ResourcesInfo extends DocumentedInfo {
+ /** Base URI for each child resource identifier. */
+ private Reference baseRef;
+
+ /** List of child resources. */
+ private List<ResourceInfo> resources;
+
+ /**
+ * Constructor.
+ */
+ public ResourcesInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public ResourcesInfo(DocumentationInfo documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public ResourcesInfo(List<DocumentationInfo> documentations) {
+ super(documentations);
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public ResourcesInfo(String documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Returns the base URI for each child resource identifier.
+ *
+ * @return The base URI for each child resource identifier.
+ */
+ public Reference getBaseRef() {
+ return this.baseRef;
+ }
+
+ /**
+ * Returns the list of child resources.
+ *
+ * @return The list of child resources.
+ */
+ public List<ResourceInfo> getResources() {
+ // Lazy initialization with double-check.
+ List<ResourceInfo> r = this.resources;
+ if (r == null) {
+ synchronized (this) {
+ r = this.resources;
+ if (r == null) {
+ this.resources = r = new ArrayList<ResourceInfo>();
+ }
+ }
+ }
+ return r;
+ }
+
+ /**
+ * Sets the base URI for each child resource identifier.
+ *
+ * @param baseRef
+ * The base URI for each child resource identifier.
+ */
+ public void setBaseRef(Reference baseRef) {
+ this.baseRef = baseRef;
+ }
+
+ /**
+ * Sets the list of child resources.
+ *
+ * @param resources
+ * The list of child resources.
+ */
+ public void setResources(List<ResourceInfo> resources) {
+ this.resources = resources;
+ }
+
+}
diff --git a/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResponseInfo.java b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResponseInfo.java
new file mode 100644
index 0000000000..0a653d33af
--- /dev/null
+++ b/modules/org.restlet.ext.apispark/src/org/restlet/ext/apispark/info/ResponseInfo.java
@@ -0,0 +1,186 @@
+/**
+ * Copyright 2005-2014 Restlet
+ *
+ * The contents of this file are subject to the terms of one of the following
+ * open source licenses: Apache 2.0 or LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL
+ * 1.0 (the "Licenses"). You can select the license that you prefer but you may
+ * not use this file except in compliance with one of these Licenses.
+ *
+ * You can obtain a copy of the Apache 2.0 license at
+ * http://www.opensource.org/licenses/apache-2.0
+ *
+ * You can obtain a copy of the LGPL 3.0 license at
+ * http://www.opensource.org/licenses/lgpl-3.0
+ *
+ * You can obtain a copy of the LGPL 2.1 license at
+ * http://www.opensource.org/licenses/lgpl-2.1
+ *
+ * You can obtain a copy of the CDDL 1.0 license at
+ * http://www.opensource.org/licenses/cddl1
+ *
+ * You can obtain a copy of the EPL 1.0 license at
+ * http://www.opensource.org/licenses/eclipse-1.0
+ *
+ * See the Licenses for the specific language governing permissions and
+ * limitations under the Licenses.
+ *
+ * Alternatively, you can obtain a royalty free commercial license with less
+ * limitations, transferable or non-transferable, directly at
+ * http://restlet.com/products/restlet-framework
+ *
+ * Restlet is a registered trademark of Restlet S.A.S.
+ */
+
+package org.restlet.ext.apispark.info;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.restlet.data.Status;
+
+/**
+ * Describes the properties of a response associated to a parent method.
+ *
+ * @author Jerome Louvel
+ */
+public class ResponseInfo extends DocumentedInfo {
+
+ /** List of parameters. */
+ private List<ParameterInfo> parameters;
+
+ /** List of representations. */
+ private List<RepresentationInfo> representations;
+
+ /**
+ * List of statuses associated with this response representation.
+ */
+ private List<Status> statuses;
+
+ /**
+ * Constructor.
+ */
+ public ResponseInfo() {
+ super();
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public ResponseInfo(DocumentationInfo documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Constructor with a list of documentation elements.
+ *
+ * @param documentations
+ * The list of documentation elements.
+ */
+ public ResponseInfo(List<DocumentationInfo> documentations) {
+ super(documentations);
+ }
+
+ /**
+ * Constructor with a single documentation element.
+ *
+ * @param documentation
+ * A single documentation element.
+ */
+ public ResponseInfo(String documentation) {
+ super(documentation);
+ }
+
+ /**
+ * Returns the list of parameters.
+ *
+ * @return The list of parameters.
+ */
+ public List<ParameterInfo> getParameters() {
+ // Lazy initialization with double-check.
+ List<ParameterInfo> p = this.parameters;
+ if (p == null) {
+ synchronized (this) {
+ p = this.parameters;
+ if (p == null) {
+ this.parameters = p = new ArrayList<ParameterInfo>();
+ }
+ }
+ }
+ return p;
+ }
+
+ /**
+ * Returns the list of representations
+ *
+ * @return The list of representations
+ */
+ public List<RepresentationInfo> getRepresentations() {
+ // Lazy initialization with double-check.
+ List<RepresentationInfo> r = this.representations;
+ if (r == null) {
+ synchronized (this) {
+ r = this.representations;
+ if (r == null) {
+ this.representations = r = new ArrayList<RepresentationInfo>();
+ }
+ }
+ }
+ return r;
+ }
+
+ /**
+ * Returns the list of statuses associated with this response
+ * representation.
+ *
+ * @return The list of statuses associated with this response
+ * representation.
+ */
+ public List<Status> getStatuses() {
+ // Lazy initialization with double-check.
+ List<Status> s = this.statuses;
+ if (s == null) {
+ synchronized (this) {
+ s = this.statuses;
+ if (s == null) {
+ this.statuses = s = new ArrayList<Status>();
+ }
+ }
+ }
+ return s;
+ }
+
+ /**
+ * Sets the list of parameters.
+ *
+ * @param parameters
+ * The list of parameters.
+ */
+ public void setParameters(List<ParameterInfo> parameters) {
+ this.parameters = parameters;
+ }
+
+ /**
+ * Sets the list of representations
+ *
+ * @param representations
+ * The list of representations
+ */
+ public void setRepresentations(List<RepresentationInfo> representations) {
+ this.representations = representations;
+ }
+
+ /**
+ * Sets the list of statuses associated with this response representation.
+ *
+ * @param statuses
+ * The list of statuses associated with this response
+ * representation.
+ */
+ public void setStatuses(List<Status> statuses) {
+ this.statuses = statuses;
+ }
+
+}
diff --git a/modules/org.restlet/.settings/org.eclipse.jdt.core.prefs b/modules/org.restlet/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000000..f42de363af
--- /dev/null
+++ b/modules/org.restlet/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,7 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
+org.eclipse.jdt.core.compiler.compliance=1.7
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.7
|
629acade597d92dbfd8d020497a0ba523afd1754
|
ReactiveX-RxJava
|
Add operators to create Observables from- BroadcastReceiver--it allows to listen global and local (with support LocalBroadcastManager) broadcasts-
|
a
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-contrib/rxjava-android/src/main/java/rx/android/observables/AndroidObservable.java b/rxjava-contrib/rxjava-android/src/main/java/rx/android/observables/AndroidObservable.java
index 498a9547f7..e891ded435 100644
--- a/rxjava-contrib/rxjava-android/src/main/java/rx/android/observables/AndroidObservable.java
+++ b/rxjava-contrib/rxjava-android/src/main/java/rx/android/observables/AndroidObservable.java
@@ -15,16 +15,22 @@
*/
package rx.android.observables;
-import static rx.android.schedulers.AndroidSchedulers.mainThread;
+import android.app.Activity;
+import android.app.Fragment;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.os.Build;
+import android.os.Handler;
import rx.Observable;
import rx.functions.Func1;
-import rx.operators.OperatorObserveFromAndroidComponent;
+import rx.operators.OperatorBroadcastRegister;
import rx.operators.OperatorConditionalBinding;
+import rx.operators.OperatorLocalBroadcastRegister;
+import rx.operators.OperatorObserveFromAndroidComponent;
-import android.app.Activity;
-import android.app.Fragment;
-import android.os.Build;
+import static rx.android.schedulers.AndroidSchedulers.mainThread;
public final class AndroidObservable {
@@ -176,4 +182,37 @@ public static <T> Observable<T> bindFragment(Object fragment, Observable<T> sour
throw new IllegalArgumentException("Target fragment is neither a native nor support library Fragment");
}
}
+
+ /**
+ * Create Observable that wraps BroadcastReceiver and emmit received intents.
+ *
+ * @param filter Selects the Intent broadcasts to be received.
+ */
+ public static Observable<Intent> fromBroadcast(Context context, IntentFilter filter){
+ return Observable.create(new OperatorBroadcastRegister(context, filter, null, null));
+ }
+
+ /**
+ * Create Observable that wraps BroadcastReceiver and emmit received intents.
+ *
+ * @param filter Selects the Intent broadcasts to be received.
+ * @param broadcastPermission String naming a permissions that a
+ * broadcaster must hold in order to send an Intent to you. If null,
+ * no permission is required.
+ * @param schedulerHandler Handler identifying the thread that will receive
+ * the Intent. If null, the main thread of the process will be used.
+ */
+ public static Observable<Intent> fromBroadcast(Context context, IntentFilter filter, String broadcastPermission, Handler schedulerHandler){
+ return Observable.create(new OperatorBroadcastRegister(context, filter, broadcastPermission, schedulerHandler));
+ }
+
+ /**
+ * Create Observable that wraps BroadcastReceiver and connects to LocalBroadcastManager
+ * to emmit received intents.
+ *
+ * @param filter Selects the Intent broadcasts to be received.
+ */
+ public static Observable<Intent> fromLocalBroadcast(Context context, IntentFilter filter){
+ return Observable.create(new OperatorLocalBroadcastRegister(context, filter));
+ }
}
diff --git a/rxjava-contrib/rxjava-android/src/main/java/rx/operators/OperatorBroadcastRegister.java b/rxjava-contrib/rxjava-android/src/main/java/rx/operators/OperatorBroadcastRegister.java
new file mode 100644
index 0000000000..49bc1dfaf4
--- /dev/null
+++ b/rxjava-contrib/rxjava-android/src/main/java/rx/operators/OperatorBroadcastRegister.java
@@ -0,0 +1,67 @@
+/**
+ * Copyright 2014 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package rx.operators;
+
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.os.Handler;
+
+import rx.Observable;
+import rx.Subscriber;
+import rx.Subscription;
+import rx.android.subscriptions.AndroidSubscriptions;
+import rx.functions.Action0;
+
+public class OperatorBroadcastRegister implements Observable.OnSubscribe<Intent> {
+
+ private final Context context;
+ private final IntentFilter intentFilter;
+ private final String broadcastPermission;
+ private final Handler schedulerHandler;
+
+ public OperatorBroadcastRegister(Context context, IntentFilter intentFilter, String broadcastPermission, Handler schedulerHandler) {
+ this.context = context;
+ this.intentFilter = intentFilter;
+ this.broadcastPermission = broadcastPermission;
+ this.schedulerHandler = schedulerHandler;
+ }
+
+ @Override
+ public void call(final Subscriber<? super Intent> subscriber) {
+ final BroadcastReceiver broadcastReceiver = new BroadcastReceiver() {
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ subscriber.onNext(intent);
+ }
+ };
+
+ final Subscription subscription = AndroidSubscriptions.unsubscribeInUiThread(new Action0() {
+ @Override
+ public void call() {
+ context.unregisterReceiver(broadcastReceiver);
+ }
+ });
+
+ subscriber.add(subscription);
+ Intent stickyIntent = context.registerReceiver(broadcastReceiver, intentFilter, broadcastPermission, schedulerHandler);
+ if (stickyIntent != null) {
+ subscriber.onNext(stickyIntent);
+ }
+
+ }
+}
diff --git a/rxjava-contrib/rxjava-android/src/main/java/rx/operators/OperatorLocalBroadcastRegister.java b/rxjava-contrib/rxjava-android/src/main/java/rx/operators/OperatorLocalBroadcastRegister.java
new file mode 100644
index 0000000000..22a8a8959c
--- /dev/null
+++ b/rxjava-contrib/rxjava-android/src/main/java/rx/operators/OperatorLocalBroadcastRegister.java
@@ -0,0 +1,60 @@
+/**
+ * Copyright 2014 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package rx.operators;
+
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.support.v4.content.LocalBroadcastManager;
+
+import rx.Observable;
+import rx.Subscriber;
+import rx.Subscription;
+import rx.android.subscriptions.AndroidSubscriptions;
+import rx.functions.Action0;
+
+public class OperatorLocalBroadcastRegister implements Observable.OnSubscribe<Intent> {
+
+ private final Context context;
+ private final IntentFilter intentFilter;
+
+ public OperatorLocalBroadcastRegister(Context context, IntentFilter intentFilter) {
+ this.context = context;
+ this.intentFilter = intentFilter;
+ }
+
+ @Override
+ public void call(final Subscriber<? super Intent> subscriber) {
+ final LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(context);
+ final BroadcastReceiver broadcastReceiver = new BroadcastReceiver() {
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ subscriber.onNext(intent);
+ }
+ };
+
+ final Subscription subscription = AndroidSubscriptions.unsubscribeInUiThread(new Action0() {
+ @Override
+ public void call() {
+ localBroadcastManager.unregisterReceiver(broadcastReceiver);
+ }
+ });
+
+ subscriber.add(subscription);
+ localBroadcastManager.registerReceiver(broadcastReceiver, intentFilter);
+ }
+}
diff --git a/rxjava-contrib/rxjava-android/src/test/java/rx/android/operators/OperatorLocalBroadcastRegisterTest.java b/rxjava-contrib/rxjava-android/src/test/java/rx/android/operators/OperatorLocalBroadcastRegisterTest.java
new file mode 100644
index 0000000000..902c49022d
--- /dev/null
+++ b/rxjava-contrib/rxjava-android/src/test/java/rx/android/operators/OperatorLocalBroadcastRegisterTest.java
@@ -0,0 +1,73 @@
+/**
+ * Copyright 2014 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package rx.android.operators;
+
+import android.app.Application;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.support.v4.content.LocalBroadcastManager;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InOrder;
+import org.robolectric.Robolectric;
+import org.robolectric.RobolectricTestRunner;
+
+import rx.Observable;
+import rx.Observer;
+import rx.Subscription;
+import rx.android.observables.AndroidObservable;
+import rx.observers.TestObserver;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.inOrder;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+
+@RunWith(RobolectricTestRunner.class)
+public class OperatorLocalBroadcastRegisterTest {
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testLocalBroadcast() {
+ String action = "TEST_ACTION";
+ IntentFilter intentFilter = new IntentFilter(action);
+ Application application = Robolectric.application;
+ Observable<Intent> observable = AndroidObservable.fromLocalBroadcast(application, intentFilter);
+ final Observer<Intent> observer = mock(Observer.class);
+ final Subscription subscription = observable.subscribe(new TestObserver<Intent>(observer));
+
+ final InOrder inOrder = inOrder(observer);
+
+ inOrder.verify(observer, never()).onNext(any(Intent.class));
+
+ Intent intent = new Intent(action);
+ LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(application);
+ localBroadcastManager.sendBroadcast(intent);
+ inOrder.verify(observer, times(1)).onNext(intent);
+
+ localBroadcastManager.sendBroadcast(intent);
+ inOrder.verify(observer, times(1)).onNext(intent);
+
+ subscription.unsubscribe();
+ inOrder.verify(observer, never()).onNext(any(Intent.class));
+
+ inOrder.verify(observer, never()).onError(any(Throwable.class));
+ inOrder.verify(observer, never()).onCompleted();
+ }
+
+}
|
f2a59c6ea1b336cf2b7563fccbeecf278777808a
|
ReactiveX-RxJava
|
Fix for back pressure on the alternate- subscription.--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/src/main/java/rx/internal/operators/OperatorSwitchIfEmpty.java b/src/main/java/rx/internal/operators/OperatorSwitchIfEmpty.java
index 67fafbd8d2..615594cc17 100644
--- a/src/main/java/rx/internal/operators/OperatorSwitchIfEmpty.java
+++ b/src/main/java/rx/internal/operators/OperatorSwitchIfEmpty.java
@@ -76,6 +76,17 @@ public void onCompleted() {
private void subscribeToAlternate() {
child.add(alternate.unsafeSubscribe(new Subscriber<T>() {
+
+ @Override
+ public void setProducer(final Producer producer) {
+ child.setProducer(new Producer() {
+ @Override
+ public void request(long n) {
+ producer.request(n);
+ }
+ });
+ }
+
@Override
public void onStart() {
final long capacity = consumerCapacity.get();
diff --git a/src/test/java/rx/internal/operators/OperatorSwitchIfEmptyTest.java b/src/test/java/rx/internal/operators/OperatorSwitchIfEmptyTest.java
index 443953563d..3fc735ccab 100644
--- a/src/test/java/rx/internal/operators/OperatorSwitchIfEmptyTest.java
+++ b/src/test/java/rx/internal/operators/OperatorSwitchIfEmptyTest.java
@@ -23,7 +23,9 @@
import rx.functions.Action0;
import rx.subscriptions.Subscriptions;
+import java.util.ArrayList;
import java.util.Arrays;
+import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.junit.Assert.assertEquals;
@@ -56,13 +58,15 @@ public void testSwitchWhenEmpty() throws Exception {
@Test
public void testSwitchWithProducer() throws Exception {
+ final AtomicBoolean emitted = new AtomicBoolean(false);
Observable<Long> withProducer = Observable.create(new Observable.OnSubscribe<Long>() {
@Override
public void call(final Subscriber<? super Long> subscriber) {
subscriber.setProducer(new Producer() {
@Override
public void request(long n) {
- if (n > 0) {
+ if (n > 0 && !emitted.get()) {
+ emitted.set(true);
subscriber.onNext(42L);
subscriber.onCompleted();
}
@@ -127,4 +131,33 @@ public void call(final Subscriber<? super Long> subscriber) {
}).switchIfEmpty(Observable.<Long>never()).subscribe();
assertTrue(s.isUnsubscribed());
}
+
+ @Test
+ public void testSwitchRequestAlternativeObservableWithBackpressure() {
+ final List<Integer> items = new ArrayList<Integer>();
+
+ Observable.<Integer>empty().switchIfEmpty(Observable.just(1, 2, 3)).subscribe(new Subscriber<Integer>() {
+
+ @Override
+ public void onStart() {
+ request(1);
+ }
+
+ @Override
+ public void onCompleted() {
+
+ }
+
+ @Override
+ public void onError(Throwable e) {
+
+ }
+
+ @Override
+ public void onNext(Integer integer) {
+ items.add(integer);
+ }
+ });
+ assertEquals(Arrays.asList(1), items);
+ }
}
\ No newline at end of file
|
d380a628bfac05b158a059306edf68baa2b33abd
|
hbase
|
HBASE-1537 Intra-row scanning; apply limit over- multiple families--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@951682 13f79535-47bb-0310-9956-ffa450edef68-
|
a
|
https://github.com/apache/hbase
|
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 2c324cceb37f..f26efbb36c92 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -2065,7 +2065,7 @@ private boolean nextInternal(int limit) throws IOException {
} else {
byte [] nextRow;
do {
- this.storeHeap.next(results, limit);
+ this.storeHeap.next(results, limit - results.size());
if (limit > 0 && results.size() == limit) {
if (this.filter != null && filter.hasFilterRow()) throw new IncompatibleFilterException(
"Filter with filterRow(List<KeyValue>) incompatible with scan with limit!");
diff --git a/src/main/resources/org/apache/hadoop/hbase/rest/XMLSchema.xsd b/src/main/resources/org/apache/hadoop/hbase/rest/XMLSchema.xsd
new file mode 100644
index 000000000000..fcaf810cd6c9
--- /dev/null
+++ b/src/main/resources/org/apache/hadoop/hbase/rest/XMLSchema.xsd
@@ -0,0 +1,152 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<schema targetNamespace="ModelSchema" elementFormDefault="qualified" xmlns="http://www.w3.org/2001/XMLSchema" xmlns:tns="ModelSchema">
+
+ <element name="Version" type="tns:Version"></element>
+
+ <complexType name="Version">
+ <attribute name="REST" type="string"></attribute>
+ <attribute name="JVM" type="string"></attribute>
+ <attribute name="OS" type="string"></attribute>
+ <attribute name="Server" type="string"></attribute>
+ <attribute name="Jersey" type="string"></attribute>
+ </complexType>
+
+ <element name="TableList" type="tns:TableList"></element>
+
+ <complexType name="TableList">
+ <sequence>
+ <element name="table" type="tns:Table" maxOccurs="unbounded" minOccurs="1"></element>
+ </sequence>
+ </complexType>
+
+ <complexType name="Table">
+ <sequence>
+ <element name="name" type="string"></element>
+ </sequence>
+ </complexType>
+
+ <element name="TableInfo" type="tns:TableInfo"></element>
+
+ <complexType name="TableInfo">
+ <sequence>
+ <element name="region" type="tns:TableRegion" maxOccurs="unbounded" minOccurs="1"></element>
+ </sequence>
+ <attribute name="name" type="string"></attribute>
+ </complexType>
+
+ <complexType name="TableRegion">
+ <attribute name="name" type="string"></attribute>
+ <attribute name="id" type="int"></attribute>
+ <attribute name="startKey" type="base64Binary"></attribute>
+ <attribute name="endKey" type="base64Binary"></attribute>
+ <attribute name="location" type="string"></attribute>
+ </complexType>
+
+ <element name="TableSchema" type="tns:TableSchema"></element>
+
+ <complexType name="TableSchema">
+ <sequence>
+ <element name="column" type="tns:ColumnSchema" maxOccurs="unbounded" minOccurs="1"></element>
+ </sequence>
+ <attribute name="name" type="string"></attribute>
+ <anyAttribute></anyAttribute>
+ </complexType>
+
+ <complexType name="ColumnSchema">
+ <attribute name="name" type="string"></attribute>
+ <anyAttribute></anyAttribute>
+ </complexType>
+
+ <element name="CellSet" type="tns:CellSet"></element>
+
+ <complexType name="CellSet">
+ <sequence>
+ <element name="row" type="tns:Row" maxOccurs="unbounded" minOccurs="1"></element>
+ </sequence>
+ </complexType>
+
+ <element name="Row" type="tns:Row"></element>
+
+ <complexType name="Row">
+ <sequence>
+ <element name="key" type="base64Binary"></element>
+ <element name="cell" type="tns:Cell" maxOccurs="unbounded" minOccurs="1"></element>
+ </sequence>
+ </complexType>
+
+ <element name="Cell" type="tns:Cell"></element>
+
+ <complexType name="Cell">
+ <sequence>
+ <element name="value" maxOccurs="1" minOccurs="1">
+ <simpleType><restriction base="base64Binary">
+ </simpleType>
+ </element>
+ </sequence>
+ <attribute name="column" type="base64Binary" />
+ <attribute name="timestamp" type="int" />
+ </complexType>
+
+ <element name="Scanner" type="tns:Scanner"></element>
+
+ <complexType name="Scanner">
+ <sequence>
+ <element name="column" type="base64Binary" minOccurs="0" maxOccurs="unbounded"></element>
+ </sequence>
+ <sequence>
+ <element name="filter" type="string" minOccurs="0" maxOccurs="1"></element>
+ </sequence>
+ <attribute name="startRow" type="base64Binary"></attribute>
+ <attribute name="endRow" type="base64Binary"></attribute>
+ <attribute name="batch" type="int"></attribute>
+ <attribute name="startTime" type="int"></attribute>
+ <attribute name="endTime" type="int"></attribute>
+ </complexType>
+
+ <element name="StorageClusterVersion" type="tns:StorageClusterVersion" />
+
+ <complexType name="StorageClusterVersion">
+ <attribute name="version" type="string"></attribute>
+ </complexType>
+
+ <element name="StorageClusterStatus"
+ type="tns:StorageClusterStatus">
+ </element>
+
+ <complexType name="StorageClusterStatus">
+ <sequence>
+ <element name="liveNode" type="tns:Node"
+ maxOccurs="unbounded" minOccurs="0">
+ </element>
+ <element name="deadNode" type="string" maxOccurs="unbounded"
+ minOccurs="0">
+ </element>
+ </sequence>
+ <attribute name="regions" type="int"></attribute>
+ <attribute name="requests" type="int"></attribute>
+ <attribute name="averageLoad" type="float"></attribute>
+ </complexType>
+
+ <complexType name="Node">
+ <sequence>
+ <element name="region" type="tns:Region"
+ maxOccurs="unbounded" minOccurs="0">
+ </element>
+ </sequence>
+ <attribute name="name" type="string"></attribute>
+ <attribute name="startCode" type="int"></attribute>
+ <attribute name="requests" type="int"></attribute>
+ <attribute name="heapSizeMB" type="int"></attribute>
+ <attribute name="maxHeapSizeMB" type="int"></attribute>
+ </complexType>
+
+ <complexType name="Region">
+ <attribute name="name" type="base64Binary"></attribute>
+ <attribute name="stores" type="int"></attribute>
+ <attribute name="storefiles" type="int"></attribute>
+ <attribute name="storefileSizeMB" type="int"></attribute>
+ <attribute name="memstoreSizeMB" type="int"></attribute>
+ <attribute name="storefileIndexSizeMB" type="int"></attribute>
+ </complexType>
+
+</schema>
\ No newline at end of file
diff --git a/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java b/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
index 0d5a17a55ed9..106cbc121cf2 100644
--- a/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
+++ b/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
@@ -23,6 +23,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -41,25 +42,39 @@
public class TestWideScanner extends HBaseTestCase {
private final Log LOG = LogFactory.getLog(this.getClass());
- static final int BATCH = 1000;
-
- private MiniDFSCluster cluster = null;
- private HRegion r;
-
+ static final byte[] A = Bytes.toBytes("A");
+ static final byte[] B = Bytes.toBytes("B");
+ static final byte[] C = Bytes.toBytes("C");
+ static byte[][] COLUMNS = { A, B, C };
+ static final Random rng = new Random();
static final HTableDescriptor TESTTABLEDESC =
new HTableDescriptor("testwidescan");
static {
- TESTTABLEDESC.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY,
+ TESTTABLEDESC.addFamily(new HColumnDescriptor(A,
+ 10, // Ten is arbitrary number. Keep versions to help debuggging.
+ Compression.Algorithm.NONE.getName(), false, true, 8 * 1024,
+ HConstants.FOREVER, StoreFile.BloomType.NONE.toString(),
+ HColumnDescriptor.DEFAULT_REPLICATION_SCOPE));
+ TESTTABLEDESC.addFamily(new HColumnDescriptor(B,
+ 10, // Ten is arbitrary number. Keep versions to help debuggging.
+ Compression.Algorithm.NONE.getName(), false, true, 8 * 1024,
+ HConstants.FOREVER, StoreFile.BloomType.NONE.toString(),
+ HColumnDescriptor.DEFAULT_REPLICATION_SCOPE));
+ TESTTABLEDESC.addFamily(new HColumnDescriptor(C,
10, // Ten is arbitrary number. Keep versions to help debuggging.
Compression.Algorithm.NONE.getName(), false, true, 8 * 1024,
HConstants.FOREVER, StoreFile.BloomType.NONE.toString(),
HColumnDescriptor.DEFAULT_REPLICATION_SCOPE));
}
+
/** HRegionInfo for root region */
public static final HRegionInfo REGION_INFO =
new HRegionInfo(TESTTABLEDESC, HConstants.EMPTY_BYTE_ARRAY,
HConstants.EMPTY_BYTE_ARRAY);
+ MiniDFSCluster cluster = null;
+ HRegion r;
+
@Override
public void setUp() throws Exception {
cluster = new MiniDFSCluster(conf, 2, true, (String[])null);
@@ -69,30 +84,15 @@ public void setUp() throws Exception {
super.setUp();
}
- private int addWideContent(HRegion region, byte[] family)
- throws IOException {
+ private int addWideContent(HRegion region) throws IOException {
int count = 0;
- // add a few rows of 2500 columns (we'll use batch of 1000) to make things
- // interesting
for (char c = 'a'; c <= 'c'; c++) {
byte[] row = Bytes.toBytes("ab" + c);
int i;
for (i = 0; i < 2500; i++) {
byte[] b = Bytes.toBytes(String.format("%10d", i));
Put put = new Put(row);
- put.add(family, b, b);
- region.put(put);
- count++;
- }
- }
- // add one row of 100,000 columns
- {
- byte[] row = Bytes.toBytes("abf");
- int i;
- for (i = 0; i < 100000; i++) {
- byte[] b = Bytes.toBytes(String.format("%10d", i));
- Put put = new Put(row);
- put.add(family, b, b);
+ put.add(COLUMNS[rng.nextInt(COLUMNS.length)], b, b);
region.put(put);
count++;
}
@@ -103,11 +103,13 @@ private int addWideContent(HRegion region, byte[] family)
public void testWideScanBatching() throws IOException {
try {
this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
- int inserted = addWideContent(this.r, HConstants.CATALOG_FAMILY);
+ int inserted = addWideContent(this.r);
List<KeyValue> results = new ArrayList<KeyValue>();
Scan scan = new Scan();
- scan.addFamily(HConstants.CATALOG_FAMILY);
- scan.setBatch(BATCH);
+ scan.addFamily(A);
+ scan.addFamily(B);
+ scan.addFamily(C);
+ scan.setBatch(1000);
InternalScanner s = r.getScanner(scan);
int total = 0;
int i = 0;
@@ -117,8 +119,8 @@ public void testWideScanBatching() throws IOException {
i++;
LOG.info("iteration #" + i + ", results.size=" + results.size());
- // assert that the result set is no larger than BATCH
- assertTrue(results.size() <= BATCH);
+ // assert that the result set is no larger than 1000
+ assertTrue(results.size() <= 1000);
total += results.size();
|
e5cf1be77c4ef3acf89436789f96fff8c081964f
|
drools
|
BZ743283: Decision tables should support timer- column instead of deprecated duration attribute--
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/ActionType.java b/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/ActionType.java
index e4ae8fc68f1..73cb78135c8 100644
--- a/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/ActionType.java
+++ b/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/ActionType.java
@@ -38,6 +38,8 @@ public enum Code {
DESCRIPTION( "DESCRIPTION", "I" ),
SALIENCE( "PRIORITY", "P", 1 ),
DURATION( "DURATION", "D", 1 ),
+ TIMER( "TIMER", "T", 1 ),
+ CALENDARS( "CALENDARS", "E", 1 ),
NOLOOP( "NO-LOOP", "U", 1 ),
LOCKONACTIVE( "LOCK-ON-ACTIVE", "L", 1 ),
AUTOFOCUS( "AUTO-FOCUS", "F", 1 ),
diff --git a/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/DefaultRuleSheetListener.java b/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/DefaultRuleSheetListener.java
index 18e87a9dfd5..1b9cfa9d465 100644
--- a/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/DefaultRuleSheetListener.java
+++ b/drools-decisiontables/src/main/java/org/drools/decisiontable/parser/DefaultRuleSheetListener.java
@@ -211,6 +211,12 @@ private Package buildRuleSet() {
getProperties().getSinglePropertyCell( code.getColHeader() ) );
}
break;
+ case TIMER:
+ ruleset.setTimer( value );
+ break;
+ case CALENDARS:
+ ruleset.setCalendars( value );
+ break;
case NOLOOP:
ruleset.setNoLoop( RuleSheetParserUtil.isStringMeaningTrue( value ) );
break;
@@ -628,6 +634,12 @@ private void nextDataCell(final int row,
RuleSheetParserUtil.rc2name( row, column ) );
}
break;
+ case TIMER:
+ this._currentRule.setTimer( value );
+ break;
+ case CALENDARS:
+ this._currentRule.setCalendars( value );
+ break;
}
}
diff --git a/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetCompilerUnitTest.java b/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetCompilerUnitTest.java
index 34afbbabd51..eed5edcee92 100644
--- a/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetCompilerUnitTest.java
+++ b/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetCompilerUnitTest.java
@@ -16,16 +16,16 @@
package org.drools.decisiontable;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
import java.io.InputStream;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
-
import org.drools.decisiontable.parser.RuleMatrixSheetListener;
+import org.junit.Test;
/**
*
@@ -154,5 +154,61 @@ public void testDeclaresCSV() {
assertTrue( drl.indexOf( "declare Smurf name : String end" ) > -1 );
}
+ @Test
+ public void testAttributesXLS() {
+ final SpreadsheetCompiler converter = new SpreadsheetCompiler();
+ String drl = converter.compile( "Attributes.xls",
+ InputType.XLS );
+
+ assertNotNull( drl );
+
+ int rule1 = drl.indexOf( "rule \"N1\"" );
+ assertFalse( rule1 == -1 );
+
+ assertTrue( drl.indexOf( "no-loop true",
+ rule1 ) > -1 );
+ assertTrue( drl.indexOf( "duration 100",
+ rule1 ) > -1 );
+ assertTrue( drl.indexOf( "salience 1",
+ rule1 ) > -1 );
+ assertTrue( drl.indexOf( "ruleflow-group \"RFG1\"",
+ rule1 ) > -1 );
+ assertTrue( drl.indexOf( "agenda-group \"AG1\"",
+ rule1 ) > -1 );
+ assertTrue( drl.indexOf( "timer (T1)",
+ rule1 ) > -1 );
+ assertTrue( drl.indexOf( "lock-on-active true",
+ rule1 ) > -1 );
+ assertTrue( drl.indexOf( "activation-group \"g1\"",
+ rule1 ) > -1 );
+ assertTrue( drl.indexOf( "auto-focus true",
+ rule1 ) > -1 );
+ assertTrue( drl.indexOf( "calendars \"CAL1\"",
+ rule1 ) > -1 );
+
+ int rule2 = drl.indexOf( "rule \"N2\"" );
+ assertFalse( rule2 == -1 );
+
+ assertTrue( drl.indexOf( "no-loop false",
+ rule2 ) > -1 );
+ assertTrue( drl.indexOf( "duration 200",
+ rule2 ) > -1 );
+ assertTrue( drl.indexOf( "salience 2",
+ rule2 ) > -1 );
+ assertTrue( drl.indexOf( "ruleflow-group \"RFG2\"",
+ rule2 ) > -1 );
+ assertTrue( drl.indexOf( "agenda-group \"AG2\"",
+ rule2 ) > -1 );
+ assertTrue( drl.indexOf( "timer (T2)",
+ rule2 ) > -1 );
+ assertTrue( drl.indexOf( "lock-on-active false",
+ rule2 ) > -1 );
+ assertTrue( drl.indexOf( "activation-group \"g2\"",
+ rule2 ) > -1 );
+ assertTrue( drl.indexOf( "auto-focus false",
+ rule2 ) > -1 );
+ assertTrue( drl.indexOf( "calendars \"CAL2\"",
+ rule2 ) > -1 );
+ }
}
diff --git a/drools-decisiontables/src/test/java/org/drools/decisiontable/parser/ActionTypeTest.java b/drools-decisiontables/src/test/java/org/drools/decisiontable/parser/ActionTypeTest.java
index 13dcf1710da..cbfc6e9644a 100644
--- a/drools-decisiontables/src/test/java/org/drools/decisiontable/parser/ActionTypeTest.java
+++ b/drools-decisiontables/src/test/java/org/drools/decisiontable/parser/ActionTypeTest.java
@@ -15,18 +15,117 @@ public class ActionTypeTest {
@Test
public void testChooseActionType() {
+
Map<Integer, ActionType> actionTypeMap = new HashMap<Integer, ActionType>();
ActionType.addNewActionType( actionTypeMap, "C", 0, 1 );
-
ActionType type = (ActionType) actionTypeMap.get( new Integer(0) );
assertEquals( Code.CONDITION, type.getCode() );
-
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "CONDITION", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.CONDITION, type.getCode());
actionTypeMap = new HashMap<Integer, ActionType>();
ActionType.addNewActionType( actionTypeMap, "A", 0, 1 );
type = (ActionType) actionTypeMap.get( new Integer(0) );
assertEquals(Code.ACTION, type.getCode());
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "ACTION", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.ACTION, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "N", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.NAME, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "NAME", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.NAME, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "I", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.DESCRIPTION, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "DESCRIPTION", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.DESCRIPTION, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "P", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.SALIENCE, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "PRIORITY", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.SALIENCE, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "D", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.DURATION, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "DURATION", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.DURATION, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "T", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.TIMER, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "TIMER", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.TIMER, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "E", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.CALENDARS, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "CALENDARS", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.CALENDARS, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "U", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.NOLOOP, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "NO-LOOP", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.NOLOOP, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "L", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.LOCKONACTIVE, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "LOCK-ON-ACTIVE", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.LOCKONACTIVE, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "F", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.AUTOFOCUS, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "AUTO-FOCUS", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.AUTOFOCUS, type.getCode());
+
actionTypeMap = new HashMap<Integer, ActionType>();
ActionType.addNewActionType( actionTypeMap, "X", 0, 1 );
type = (ActionType) actionTypeMap.get( new Integer(0) );
@@ -36,16 +135,37 @@ public void testChooseActionType() {
ActionType.addNewActionType( actionTypeMap, "ACTIVATION-GROUP", 0, 1 );
type = (ActionType) actionTypeMap.get( new Integer(0) );
assertEquals(Code.ACTIVATIONGROUP, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "G", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.AGENDAGROUP, type.getCode());
actionTypeMap = new HashMap<Integer, ActionType>();
- ActionType.addNewActionType( actionTypeMap, "NO-LOOP", 0, 1 );
+ ActionType.addNewActionType( actionTypeMap, "AGENDA-GROUP", 0, 1 );
type = (ActionType) actionTypeMap.get( new Integer(0) );
- assertEquals(Code.NOLOOP, type.getCode());
+ assertEquals(Code.AGENDAGROUP, type.getCode());
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "R", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.RULEFLOWGROUP, type.getCode());
+
actionTypeMap = new HashMap<Integer, ActionType>();
ActionType.addNewActionType( actionTypeMap, "RULEFLOW-GROUP", 0, 1 );
type = (ActionType) actionTypeMap.get( new Integer(0) );
assertEquals(Code.RULEFLOWGROUP, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "@", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.METADATA, type.getCode());
+
+ actionTypeMap = new HashMap<Integer, ActionType>();
+ ActionType.addNewActionType( actionTypeMap, "METADATA", 0, 1 );
+ type = (ActionType) actionTypeMap.get( new Integer(0) );
+ assertEquals(Code.METADATA, type.getCode());
+
}
}
diff --git a/drools-decisiontables/src/test/resources/org/drools/decisiontable/Attributes.xls b/drools-decisiontables/src/test/resources/org/drools/decisiontable/Attributes.xls
new file mode 100644
index 00000000000..3159e4ffeb6
Binary files /dev/null and b/drools-decisiontables/src/test/resources/org/drools/decisiontable/Attributes.xls differ
diff --git a/drools-templates/src/main/java/org/drools/template/model/AttributedDRLElement.java b/drools-templates/src/main/java/org/drools/template/model/AttributedDRLElement.java
index 9d7dc79663a..b46e25c7c45 100644
--- a/drools-templates/src/main/java/org/drools/template/model/AttributedDRLElement.java
+++ b/drools-templates/src/main/java/org/drools/template/model/AttributedDRLElement.java
@@ -53,6 +53,14 @@ protected String asStringLiteral( String value ){
return '"' + value.replaceAll( "\"", Matcher.quoteReplacement( "\\\"" ) ) + '"';
}
+ protected String asTimerLiteral( String value ){
+ // Keep the brackets if they come in the right places.
+ if( value.startsWith( "(" ) && value.endsWith( ")" ) && value.length() >= 2 ){
+ value = value.substring( 1, value.length() - 1 );
+ }
+ return "(" + value+ ")";
+ }
+
public void setSalience( final Integer value ){
this._attr2value.put( "salience", Integer.toString( value ) );
}
@@ -65,6 +73,14 @@ public void setDuration(final Long value) {
this._attr2value.put( "duration", Long.toString( value ) );
}
+ public void setTimer(final String value) {
+ this._attr2value.put( "timer", asTimerLiteral( value ) );
+ }
+
+ public void setCalendars(final String value) {
+ this._attr2value.put( "calendars", asStringLiteral( value ) );
+ }
+
public void setActivationGroup(final String value) {
this._attr2value.put( "activation-group", asStringLiteral( value ) );
}
|
43719c7dfd29922dd45a007923405958aa540f2d
|
ReactiveX-RxJava
|
---
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-contrib/rxjava-quasar/src/main/java/rx/quasar/ChannelObservable.java b/rxjava-contrib/rxjava-quasar/src/main/java/rx/quasar/ChannelObservable.java
index 67d09dde4a..45bcaa01c6 100644
--- a/rxjava-contrib/rxjava-quasar/src/main/java/rx/quasar/ChannelObservable.java
+++ b/rxjava-contrib/rxjava-quasar/src/main/java/rx/quasar/ChannelObservable.java
@@ -24,10 +24,6 @@
import rx.Observable;
import rx.Observer;
import rx.Scheduler;
-import rx.util.functions.Action2;
-import rx.util.functions.Actions;
-import rx.util.functions.Func1;
-import rx.util.functions.Functions;
/**
* This class contains static methods that connect {@link Observable}s and {@link Channel}s.
@@ -121,21 +117,6 @@ public void onError(Throwable e) {
*/
public final static <T> ReceivePort<T> subscribe(int bufferSize, Channels.OverflowPolicy policy, Observable<T> o) {
final Channel<T> channel = Channels.newChannel(bufferSize, policy);
-
- System.out.println(Functions.fromFunc(new Func1<String, String>() {
-
- @Override
- public String call(String t1) {
- throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
- }
- }));
- System.out.println(Actions.toFunc(new Action2<String, String>() {
-
- @Override
- public void call(String t1, String t2) {
- throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
- }
- }));
o.subscribe(new Observer<T>() {
@Override
|
9d6c63f8bee34431384adfb20add2abd5b6aa9c0
|
hadoop
|
YARN-3613. TestContainerManagerSecurity should init- and start Yarn cluster in setup instead of individual methods. (nijel via- kasha)--(cherry picked from commit fe0df596271340788095cb43a1944e19ac4c2cf7)-
|
p
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index d5d57a78a9e56..39f1bd6a4dac0 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -185,6 +185,9 @@ Release 2.8.0 - UNRELEASED
YARN-3513. Remove unused variables in ContainersMonitorImpl and add debug
log for overall resource usage by all containers. (Naganarasimha G R via devaraj)
+ YARN-3613. TestContainerManagerSecurity should init and start Yarn cluster in
+ setup instead of individual methods. (nijel via kasha)
+
OPTIMIZATIONS
YARN-3339. TestDockerContainerExecutor should pull a single image and not
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
index f0dcb562a234c..59bb6aaba1313 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
@@ -82,8 +82,6 @@
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
-import com.google.common.io.ByteArrayDataInput;
-import com.google.common.io.ByteStreams;
@RunWith(Parameterized.class)
public class TestContainerManagerSecurity extends KerberosSecurityTestcase {
@@ -105,10 +103,20 @@ public void setUp() throws Exception {
testRootDir.mkdirs();
httpSpnegoKeytabFile.deleteOnExit();
getKdc().createPrincipal(httpSpnegoKeytabFile, httpSpnegoPrincipal);
+
+ yarnCluster =
+ new MiniYARNCluster(TestContainerManagerSecurity.class.getName(), 1, 1,
+ 1);
+ yarnCluster.init(conf);
+ yarnCluster.start();
}
@After
public void tearDown() {
+ if (yarnCluster != null) {
+ yarnCluster.stop();
+ yarnCluster = null;
+ }
testRootDir.delete();
}
@@ -144,11 +152,6 @@ public TestContainerManagerSecurity(Configuration conf) {
@Test (timeout = 120000)
public void testContainerManager() throws Exception {
- try {
- yarnCluster = new MiniYARNCluster(TestContainerManagerSecurity.class
- .getName(), 1, 1, 1);
- yarnCluster.init(conf);
- yarnCluster.start();
// TestNMTokens.
testNMTokens(conf);
@@ -156,36 +159,11 @@ public void testContainerManager() throws Exception {
// Testing for container token tampering
testContainerToken(conf);
- } catch (Exception e) {
- e.printStackTrace();
- throw e;
- } finally {
- if (yarnCluster != null) {
- yarnCluster.stop();
- yarnCluster = null;
- }
- }
- }
-
- @Test (timeout = 120000)
- public void testContainerManagerWithEpoch() throws Exception {
- try {
- yarnCluster = new MiniYARNCluster(TestContainerManagerSecurity.class
- .getName(), 1, 1, 1);
- yarnCluster.init(conf);
- yarnCluster.start();
-
- // Testing for container token tampering
+ // Testing for container token tampering with epoch
testContainerTokenWithEpoch(conf);
- } finally {
- if (yarnCluster != null) {
- yarnCluster.stop();
- yarnCluster = null;
- }
- }
}
-
+
private void testNMTokens(Configuration conf) throws Exception {
NMTokenSecretManagerInRM nmTokenSecretManagerRM =
yarnCluster.getResourceManager().getRMContext()
|
635b24fec892a394fba1eb9154a3c6888020f419
|
restlet-framework-java
|
- Updated Jetty to version 7.0 RC4.- Upgraded the Jetty extension: replaced "headerBufferSize" parameter- by "requestHeaderSize" and "responseHeaderSize" parameters. - Removed "lowThreads" parameter no longer available.--
|
p
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/build/tmpl/text/changes.txt b/build/tmpl/text/changes.txt
index f56805331c..2540f46b00 100644
--- a/build/tmpl/text/changes.txt
+++ b/build/tmpl/text/changes.txt
@@ -35,6 +35,10 @@ Changes log
- Updated Velocity to version 1.6.2.
- Updated JiBX to version 1.2.1.
- Updated db4o to version 7.10.96.
+ - Updated Jetty to version 7.0 RC4. Upgraded the Jetty
+ extension: replaced "headerBufferSize" parameter by
+ "requestHeaderSize" and "responseHeaderSize" parameters.
+ Removed "lowThreads" parameter no longer available.
- Updated the Javadocs of the API to indicate the mapping
between properties/classes and HTTP headers.
- Updated the "Server" HTTP header returned by default to
diff --git a/libraries/org.eclipse.jetty_7.0/.classpath b/libraries/org.eclipse.jetty_7.0/.classpath
index 50e4634365..5a7b154d16 100644
--- a/libraries/org.eclipse.jetty_7.0/.classpath
+++ b/libraries/org.eclipse.jetty_7.0/.classpath
@@ -1,5 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
+ <classpathentry exported="true" kind="lib" path="org.eclipse.jetty.continuations.jar"/>
<classpathentry exported="true" kind="lib" path="org.eclipse.jetty.io.jar"/>
<classpathentry exported="true" kind="lib" path="org.eclipse.jetty.ajp.jar"/>
<classpathentry exported="true" kind="lib" path="org.eclipse.jetty.http.jar"/>
diff --git a/libraries/org.eclipse.jetty_7.0/META-INF/MANIFEST.MF b/libraries/org.eclipse.jetty_7.0/META-INF/MANIFEST.MF
index 6e56bd48dc..6c08541f17 100644
--- a/libraries/org.eclipse.jetty_7.0/META-INF/MANIFEST.MF
+++ b/libraries/org.eclipse.jetty_7.0/META-INF/MANIFEST.MF
@@ -7,7 +7,8 @@ Bundle-ClassPath: org.eclipse.jetty.ajp.jar,
org.eclipse.jetty.http.jar,
org.eclipse.jetty.server.jar,
org.eclipse.jetty.util.jar,
- org.eclipse.jetty.io.jar
+ org.eclipse.jetty.io.jar,
+ org.eclipse.jetty.continuations.jar
Bundle-Vendor: Eclipse Foundation
Bundle-RequiredExecutionEnvironment: J2SE-1.5
Import-Package: javax.servlet,
@@ -19,6 +20,7 @@ Export-Package: org.eclipse.jetty.ajp;
org.eclipse.jetty.io,
org.eclipse.jetty.http,
org.eclipse.jetty.server.bio",
+ org.eclipse.jetty.continuation;uses:="javax.servlet,org.mortbay.util.ajax",
org.eclipse.jetty.http;
uses:="org.eclipse.jetty.io,
org.eclipse.jetty.util.resource,
diff --git a/libraries/org.eclipse.jetty_7.0/build.properties b/libraries/org.eclipse.jetty_7.0/build.properties
index ccc3c69b2c..6d62fe8c00 100644
--- a/libraries/org.eclipse.jetty_7.0/build.properties
+++ b/libraries/org.eclipse.jetty_7.0/build.properties
@@ -3,5 +3,6 @@ bin.includes = META-INF/,\
org.eclipse.jetty.http.jar,\
org.eclipse.jetty.server.jar,\
org.eclipse.jetty.util.jar,\
- org.eclipse.jetty.io.jar
+ org.eclipse.jetty.io.jar,\
+ org.eclipse.jetty.continuations.jar
diff --git a/libraries/org.eclipse.jetty_7.0/org.eclipse.jetty.continuations.jar b/libraries/org.eclipse.jetty_7.0/org.eclipse.jetty.continuations.jar
new file mode 100644
index 0000000000..e816cb7725
Binary files /dev/null and b/libraries/org.eclipse.jetty_7.0/org.eclipse.jetty.continuations.jar differ
diff --git a/libraries/org.mortbay.jetty_6.1/.classpath b/libraries/org.mortbay.jetty_6.1/.classpath
deleted file mode 100644
index a0bab312d0..0000000000
--- a/libraries/org.mortbay.jetty_6.1/.classpath
+++ /dev/null
@@ -1,10 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
- <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
- <classpathentry exported="true" kind="lib" path="org.mortbay.jetty.util.jar" sourcepath="org.mortbay.jetty_6.1src.zip"/>
- <classpathentry exported="true" kind="lib" path="org.mortbay.jetty.jar" sourcepath="org.mortbay.jetty_6.1src.zip"/>
- <classpathentry exported="true" kind="lib" path="org.mortbay.jetty.ajp.jar" sourcepath="org.mortbay.jetty_6.1src.zip"/>
- <classpathentry exported="true" kind="lib" path="org.mortbay.jetty.https.jar" sourcepath="org.mortbay.jetty_6.1src.zip"/>
- <classpathentry kind="output" path="bin"/>
-</classpath>
diff --git a/libraries/org.mortbay.jetty_6.1/.project b/libraries/org.mortbay.jetty_6.1/.project
deleted file mode 100644
index b569941a8b..0000000000
--- a/libraries/org.mortbay.jetty_6.1/.project
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>org.mortbay.jetty</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.jdt.core.javabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.ManifestBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.SchemaBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.eclipse.pde.PluginNature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- </natures>
-</projectDescription>
diff --git a/libraries/org.mortbay.jetty_6.1/META-INF/MANIFEST.MF b/libraries/org.mortbay.jetty_6.1/META-INF/MANIFEST.MF
deleted file mode 100644
index c1e1ab0ba8..0000000000
--- a/libraries/org.mortbay.jetty_6.1/META-INF/MANIFEST.MF
+++ /dev/null
@@ -1,94 +0,0 @@
-Manifest-Version: 1.0
-Bundle-ManifestVersion: 2
-Bundle-Name: Jetty HTTP server 6.1
-Bundle-SymbolicName: org.mortbay.jetty
-Bundle-Version: 6.1.18
-Bundle-ClassPath: org.mortbay.jetty.util.jar,
- org.mortbay.jetty.jar, org.mortbay.jetty.ajp.jar, org.mortbay.jetty.https.jar
-Bundle-Vendor: Mortbay Consulting
-Export-Package: org.mortbay.component;uses:="new org.mortbay.component",
- org.mortbay.io;uses:="org.mortbay.util",
- org.mortbay.io.bio;uses:="org.mortbay.io",
- org.mortbay.io.nio;uses:="org.mortbay.io,org.mortbay.thread,org.mortbay.component",
- org.mortbay.jetty;
- uses:="org.mortbay.io,
- new org.mortbay.jetty,
- org.mortbay.jetty.security,
- org.mortbay.jetty.handler,
- org.mortbay.resource,
- javax.servlet,
- org.mortbay.jetty.servlet,
- org.mortbay.util.ajax,
- org.mortbay.thread,
- org.mortbay.component,
- org.mortbay.util,
- javax.servlet.http",
- org.mortbay.jetty.ajp;
- uses:="org.mortbay.jetty,
- javax.servlet,
- org.mortbay.jetty.bio,
- org.mortbay.io",
- org.mortbay.jetty.bio;uses:="org.mortbay.jetty,org.mortbay.io,org.mortbay.io.bio",
- org.mortbay.jetty.deployer;
- uses:="org.mortbay.jetty,
- org.mortbay.resource,
- org.mortbay.jetty.handler,
- org.mortbay.component,
- org.mortbay.util",
- org.mortbay.jetty.handler;
- uses:="org.mortbay.jetty,
- org.mortbay.log,
- org.mortbay.io,
- org.mortbay.resource,
- javax.servlet,
- org.mortbay.jetty.servlet,
- org.mortbay.component,
- org.mortbay.util,
- javax.servlet.http",
- org.mortbay.jetty.nio;
- uses:="org.mortbay.jetty,
- org.mortbay.io.nio,
- org.mortbay.util.ajax,
- org.mortbay.io,
- org.mortbay.thread",
- org.mortbay.jetty.security;
- uses:="org.mortbay.jetty,
- org.mortbay.io.nio,
- org.mortbay.io,
- javax.net.ssl,
- org.mortbay.jetty.nio",
- org.mortbay.jetty.servlet;
- uses:="org.mortbay.jetty,
- org.mortbay.io,
- org.mortbay.jetty.security,
- new org.mortbay.jetty.servlet,
- org.mortbay.jetty.handler,
- org.mortbay.resource,
- javax.servlet,
- org.mortbay.util,
- org.mortbay.component,
- javax.servlet.http",
- org.mortbay.jetty.webapp;
- uses:="org.mortbay.jetty,
- org.mortbay.jetty.security,
- org.mortbay.xml,
- org.mortbay.jetty.handler,
- org.mortbay.resource,
- org.mortbay.jetty.servlet,
- javax.servlet.http",
- org.mortbay.log,
- org.mortbay.resource,
- org.mortbay.servlet;uses:="javax.servlet,org.mortbay.util,javax.servlet.http",
- org.mortbay.servlet.jetty;uses:="org.mortbay.servlet,javax.servlet.http",
- org.mortbay.thread;uses:="org.mortbay.component",
- org.mortbay.util;uses:="org.mortbay.thread",
- org.mortbay.util.ajax;
- uses:="javax.servlet,
- org.mortbay.util,
- new org.mortbay.util.ajax,
- javax.servlet.http",
- org.mortbay.xml;uses:="javax.xml.parsers,org.xml.sax"
-Bundle-RequiredExecutionEnvironment: J2SE-1.5
-Import-Package: javax.servlet,
- javax.servlet.http,
- javax.servlet.resources
diff --git a/libraries/org.mortbay.jetty_6.1/build.properties b/libraries/org.mortbay.jetty_6.1/build.properties
deleted file mode 100644
index 3674e8ac5a..0000000000
--- a/libraries/org.mortbay.jetty_6.1/build.properties
+++ /dev/null
@@ -1,5 +0,0 @@
-bin.includes = META-INF/,\
- org.mortbay.jetty.util.jar,\
- org.mortbay.jetty.jar,\
- org.mortbay.jetty.ajp.jar,\
- org.mortbay.jetty.https.jar
diff --git a/libraries/org.mortbay.jetty_6.1/library.xml b/libraries/org.mortbay.jetty_6.1/library.xml
deleted file mode 100644
index 643033acc7..0000000000
--- a/libraries/org.mortbay.jetty_6.1/library.xml
+++ /dev/null
@@ -1,17 +0,0 @@
-<library id="jetty">
- <package name="org.mortbay.jetty" />
- <version>6.1</version>
- <release>18</release>
- <distributions>
- <distribution id="classic" />
- </distributions>
- <homeUri>
- http://www.mortbay.org/jetty/
- </homeUri>
- <downloadUri>
- http://docs.codehaus.org/display/JETTY/Downloading+Jetty
- </downloadUri>
- <javadocs>
- <link href="http://jetty.mortbay.org/apidocs/" />
- </javadocs>
-</library>
diff --git a/libraries/org.mortbay.jetty_6.1/license.txt b/libraries/org.mortbay.jetty_6.1/license.txt
deleted file mode 100644
index d645695673..0000000000
--- a/libraries/org.mortbay.jetty_6.1/license.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.ajp.jar b/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.ajp.jar
deleted file mode 100644
index 5cb25ae526..0000000000
Binary files a/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.ajp.jar and /dev/null differ
diff --git a/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.https.jar b/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.https.jar
deleted file mode 100644
index 3f690c09f1..0000000000
Binary files a/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.https.jar and /dev/null differ
diff --git a/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.jar b/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.jar
deleted file mode 100644
index 0fea05246f..0000000000
Binary files a/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.jar and /dev/null differ
diff --git a/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.util.jar b/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.util.jar
deleted file mode 100644
index dbe3cf79c2..0000000000
Binary files a/libraries/org.mortbay.jetty_6.1/org.mortbay.jetty.util.jar and /dev/null differ
diff --git a/libraries/org.mortbay.jetty_6.1/readme.txt b/libraries/org.mortbay.jetty_6.1/readme.txt
deleted file mode 100644
index 6b3b5275c0..0000000000
--- a/libraries/org.mortbay.jetty_6.1/readme.txt
+++ /dev/null
@@ -1,13 +0,0 @@
--------------
-Mortbay Jetty
--------------
-
-"Jetty is an open-source, standards-based, full-featured web server implemented
-entirely in java. It is released under the Apache 2.0 licence and is therefore
-free for commercial use and distribution. First created in 1995, Jetty has
-benefitted from input from a vast user community and consistent and focussed
-development by a stable core of lead developers. Full commercial 24x7 support,
-training and development services for Jetty are available from Webtide."
-
-For more information:
-http://jetty.mortbay.org/
\ No newline at end of file
diff --git a/modules/org.restlet.ext.jetty/META-INF/MANIFEST.MF b/modules/org.restlet.ext.jetty/META-INF/MANIFEST.MF
index 219c9eb1ae..ca00a991a0 100644
--- a/modules/org.restlet.ext.jetty/META-INF/MANIFEST.MF
+++ b/modules/org.restlet.ext.jetty/META-INF/MANIFEST.MF
@@ -15,14 +15,23 @@ Export-Package: org.restlet.ext.jetty;
Import-Package: javax.servlet,
javax.servlet.http,
javax.servlet.resources,
- org.mortbay.component,
- org.mortbay.jetty,
- org.mortbay.jetty.ajp,
- org.mortbay.jetty.bio,
- org.mortbay.jetty.handler,
- org.mortbay.jetty.nio,
- org.mortbay.jetty.security,
- org.mortbay.thread,
+ org.eclipse.jetty.ajp,
+ org.eclipse.jetty.http,
+ org.eclipse.jetty.http.security,
+ org.eclipse.jetty.http.ssl,
+ org.eclipse.jetty.io,
+ org.eclipse.jetty.server,
+ org.eclipse.jetty.server.bio,
+ org.eclipse.jetty.server.handler,
+ org.eclipse.jetty.server.nio,
+ org.eclipse.jetty.server.session,
+ org.eclipse.jetty.server.ssl,
+ org.eclipse.jetty.util,
+ org.eclipse.jetty.util.ajax,
+ org.eclipse.jetty.util.component,
+ org.eclipse.jetty.util.log,
+ org.eclipse.jetty.util.resource,
+ org.eclipse.jetty.util.thread,
org.restlet,
org.restlet.data,
org.restlet.engine,
diff --git a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/AjpServerHelper.java b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/AjpServerHelper.java
index da68aae8fc..fc737bdbd1 100644
--- a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/AjpServerHelper.java
+++ b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/AjpServerHelper.java
@@ -30,8 +30,8 @@
package org.restlet.ext.jetty;
-import org.mortbay.jetty.AbstractConnector;
-import org.mortbay.jetty.ajp.Ajp13SocketConnector;
+import org.eclipse.jetty.ajp.Ajp13SocketConnector;
+import org.eclipse.jetty.server.AbstractConnector;
import org.restlet.Server;
import org.restlet.data.Protocol;
diff --git a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/HttpServerHelper.java b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/HttpServerHelper.java
index 788d728ca1..e3083d7aba 100644
--- a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/HttpServerHelper.java
+++ b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/HttpServerHelper.java
@@ -30,10 +30,10 @@
package org.restlet.ext.jetty;
-import org.mortbay.jetty.AbstractConnector;
-import org.mortbay.jetty.bio.SocketConnector;
-import org.mortbay.jetty.nio.BlockingChannelConnector;
-import org.mortbay.jetty.nio.SelectChannelConnector;
+import org.eclipse.jetty.server.AbstractConnector;
+import org.eclipse.jetty.server.bio.SocketConnector;
+import org.eclipse.jetty.server.nio.BlockingChannelConnector;
+import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.restlet.Server;
import org.restlet.data.Protocol;
diff --git a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/HttpsServerHelper.java b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/HttpsServerHelper.java
index 54240656ba..3f8500dd22 100644
--- a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/HttpsServerHelper.java
+++ b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/HttpsServerHelper.java
@@ -35,15 +35,14 @@
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLServerSocketFactory;
-import org.mortbay.jetty.AbstractConnector;
-import org.mortbay.jetty.security.SslSelectChannelConnector;
-import org.mortbay.jetty.security.SslSocketConnector;
+import org.eclipse.jetty.server.AbstractConnector;
+import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
+import org.eclipse.jetty.server.ssl.SslSocketConnector;
import org.restlet.Server;
import org.restlet.data.Protocol;
import org.restlet.engine.http.HttpsUtils;
import org.restlet.engine.security.SslContextFactory;
-
/**
* Jetty HTTPS server connector. Here is the list of additional parameters that
* are supported:
@@ -140,7 +139,9 @@
* </tr>
* </table>
*
- * @see <a href="http://docs.codehaus.org/display/JETTY/How+to+configure+SSL">How to configure SSL for Jetty</a>
+ * @see <a
+ * href="http://docs.codehaus.org/display/JETTY/How+to+configure+SSL">How
+ * to configure SSL for Jetty</a>
* @author Jerome Louvel
*/
public class HttpsServerHelper extends JettyServerHelper {
diff --git a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyCall.java b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyCall.java
index e555c89f60..d113540add 100644
--- a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyCall.java
+++ b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyCall.java
@@ -42,8 +42,8 @@
import java.util.List;
import java.util.logging.Level;
-import org.mortbay.jetty.EofException;
-import org.mortbay.jetty.HttpConnection;
+import org.eclipse.jetty.io.EofException;
+import org.eclipse.jetty.server.HttpConnection;
import org.restlet.Response;
import org.restlet.Server;
import org.restlet.data.Parameter;
@@ -153,7 +153,6 @@ public ReadableByteChannel getRequestHeadChannel() {
* @return The list of request headers.
*/
@Override
- @SuppressWarnings("unchecked")
public Series<Parameter> getRequestHeaders() {
final Series<Parameter> result = super.getRequestHeaders();
diff --git a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyHandler.java b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyHandler.java
index 0f3241783f..dd68342063 100644
--- a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyHandler.java
+++ b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyHandler.java
@@ -36,9 +36,9 @@
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.mortbay.jetty.HttpConnection;
-import org.mortbay.jetty.Request;
-import org.mortbay.jetty.handler.AbstractHandler;
+import org.eclipse.jetty.server.HttpConnection;
+import org.eclipse.jetty.server.Request;
+import org.eclipse.jetty.server.handler.AbstractHandler;
import org.restlet.Server;
/**
@@ -100,15 +100,16 @@ protected void doStop() throws Exception {
* The target of the request, either a URI or a name.
* @param request
* The Jetty request.
- * @param response
- * The Jetty response.
- * @param dispatch
- * The Jetty dispatch mode.
+ * @param servletRequest
+ * The Servlet request.
+ * @param servletResponse
+ * The Servlet response.
*/
- public void handle(String target, HttpServletRequest request,
- HttpServletResponse response, int dispatch) throws IOException,
+ public void handle(String target, Request arg1,
+ HttpServletRequest servletRequest,
+ HttpServletResponse servletResponse) throws IOException,
ServletException {
- final Request baseRequest = (request instanceof Request) ? (Request) request
+ final Request baseRequest = (servletRequest instanceof Request) ? (Request) servletRequest
: HttpConnection.getCurrentConnection().getRequest();
this.helper.handle(new JettyCall(this.helper.getHelped(),
HttpConnection.getCurrentConnection()));
diff --git a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyServerHelper.java b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyServerHelper.java
index a1ebeca86e..e54124708e 100644
--- a/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyServerHelper.java
+++ b/modules/org.restlet.ext.jetty/src/org/restlet/ext/jetty/JettyServerHelper.java
@@ -34,10 +34,10 @@
import javax.servlet.ServletException;
-import org.mortbay.jetty.AbstractConnector;
-import org.mortbay.jetty.HttpConnection;
-import org.mortbay.jetty.Server;
-import org.mortbay.thread.QueuedThreadPool;
+import org.eclipse.jetty.server.AbstractConnector;
+import org.eclipse.jetty.server.HttpConnection;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
/**
* Abstract Jetty Web server connector. Here is the list of parameters that are
@@ -68,13 +68,6 @@
* <td>Time for an idle thread to wait for a request or read.</td>
* </tr>
* <tr>
- * <td>lowThreads</td>
- * <td>int</td>
- * <td>25</td>
- * <td>Threshold of remaining threads at which the server is considered as
- * running low on resources.</td>
- * </tr>
- * <tr>
* <td>lowResourceMaxIdleTimeMs</td>
* <td>int</td>
* <td>2500</td>
@@ -94,10 +87,16 @@
* <td>Size of the accept queue.</td>
* </tr>
* <tr>
- * <td>headerBufferSize</td>
+ * <td>requestHeaderSize</td>
+ * <td>int</td>
+ * <td>4*1024</td>
+ * <td>Size of the buffer to be used for request headers.</td>
+ * </tr>
+ * <tr>
+ * <td>responseHeaderSize</td>
* <td>int</td>
* <td>4*1024</td>
- * <td>Size of the buffer to be used for request and response headers.</td>
+ * <td>Size of the buffer to be used for response headers.</td>
* </tr>
* <tr>
* <td>requestBufferSize</td>
@@ -159,7 +158,7 @@ public abstract class JettyServerHelper extends
*
* @author Jerome Louvel
*/
- private static class WrappedServer extends org.mortbay.jetty.Server {
+ private static class WrappedServer extends org.eclipse.jetty.server.Server {
JettyServerHelper helper;
/**
@@ -218,7 +217,8 @@ protected void configure(AbstractConnector connector) {
connector.setLowResourceMaxIdleTime(getLowResourceMaxIdleTimeMs());
connector.setAcceptors(getAcceptorThreads());
connector.setAcceptQueueSize(getAcceptQueueSize());
- connector.setHeaderBufferSize(getHeaderBufferSize());
+ connector.setRequestHeaderSize(getRequestHeaderSize());
+ connector.setResponseHeaderSize(getResponseHeaderSize());
connector.setRequestBufferSize(getRequestBufferSize());
connector.setResponseBufferSize(getResponseBufferSize());
connector.setMaxIdleTime(getIoMaxIdleTimeMs());
@@ -263,18 +263,6 @@ public int getGracefulShutdown() {
"gracefulShutdown", "0"));
}
- /**
- * Returns the size of the buffer to be used for request and response
- * headers.
- *
- * @return The size of the buffer to be used for request and response
- * headers.
- */
- public int getHeaderBufferSize() {
- return Integer.parseInt(getHelpedParameters().getFirstValue(
- "headerBufferSize", Integer.toString(4 * 1024)));
- }
-
/**
* Returns the maximum time to wait on an idle IO operation.
*
@@ -297,18 +285,6 @@ public int getLowResourceMaxIdleTimeMs() {
"lowResourceMaxIdleTimeMs", "2500"));
}
- /**
- * Returns the threshold of remaining threads at which the server is
- * considered as running low on resources.
- *
- * @return The threshold of remaining threads at which the server is
- * considered as running low on resources.
- */
- public int getLowThreads() {
- return Integer.parseInt(getHelpedParameters().getFirstValue(
- "lowThreads", "25"));
- }
-
/**
* Returns the maximum threads that will service requests.
*
@@ -339,6 +315,16 @@ public int getRequestBufferSize() {
"requestBufferSize", Integer.toString(8 * 1024)));
}
+ /**
+ * Returns the size of the buffer to be used for request headers.
+ *
+ * @return The size of the buffer to be used for request headers.
+ */
+ public int getRequestHeaderSize() {
+ return Integer.parseInt(getHelpedParameters().getFirstValue(
+ "requestHeaderSize", Integer.toString(4 * 1024)));
+ }
+
/**
* Returns the size of the content buffer for sending responses.
*
@@ -349,6 +335,16 @@ public int getResponseBufferSize() {
"responseBufferSize", Integer.toString(32 * 1024)));
}
+ /**
+ * Returns the size of the buffer to be used for response headers.
+ *
+ * @return The size of the buffer to be used for response headers.
+ */
+ public int getResponseHeaderSize() {
+ return Integer.parseInt(getHelpedParameters().getFirstValue(
+ "responseHeaderSize", Integer.toString(4 * 1024)));
+ }
+
/**
* Returns the SO linger time (see Jetty 6 documentation).
*
@@ -379,8 +375,7 @@ protected Server getWrappedServer() {
this.wrappedServer = new WrappedServer(this);
// Configuring the thread pool
- final QueuedThreadPool btp = new QueuedThreadPool();
- btp.setLowThreads(getLowThreads());
+ QueuedThreadPool btp = new QueuedThreadPool();
btp.setMaxIdleTimeMs(getThreadMaxIdleTimeMs());
btp.setMaxThreads(getMaxThreads());
btp.setMinThreads(getMinThreads());
|
21ee77a4383f5c970e8c73967d38615f5bfb48af
|
camel
|
Checkstyle--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1228067 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/components/camel-websocket/src/main/java/org/apache/camel/component/websocket/WebsocketConstants.java b/components/camel-websocket/src/main/java/org/apache/camel/component/websocket/WebsocketConstants.java
index e49dd3d85084d..91e4e0b902a95 100644
--- a/components/camel-websocket/src/main/java/org/apache/camel/component/websocket/WebsocketConstants.java
+++ b/components/camel-websocket/src/main/java/org/apache/camel/component/websocket/WebsocketConstants.java
@@ -16,12 +16,15 @@
*/
package org.apache.camel.component.websocket;
-public class WebsocketConstants {
+public final class WebsocketConstants {
+ public static final int DEFAULT_PORT = 9292;
public static final String CONNECTION_KEY = "websocket.connectionKey";
public static final String SEND_TO_ALL = "websocket.sendToAll";
-
public static final String DEFAULT_HOST = "0.0.0.0";
- public static final int DEFAULT_PORT = 9292;
+
+ private WebsocketConstants() {
+ };
+
}
diff --git a/components/camel-websocket/src/test/java/org/apache/camel/component/websocket/WebsocketConfigurationTest.java b/components/camel-websocket/src/test/java/org/apache/camel/component/websocket/WebsocketConfigurationTest.java
index 26a03865350d0..d542417a1f74f 100644
--- a/components/camel-websocket/src/test/java/org/apache/camel/component/websocket/WebsocketConfigurationTest.java
+++ b/components/camel-websocket/src/test/java/org/apache/camel/component/websocket/WebsocketConfigurationTest.java
@@ -23,7 +23,6 @@
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertEquals;
public class WebsocketConfigurationTest {
@@ -61,10 +60,6 @@ public void testParameters() throws Exception {
assertNotNull(websocketEndpoint);
assertNotNull(REMAINING);
assertNotNull(wsConfig.getGlobalStore());
- // System.out.println(URI);
- // System.out.println(component);
- // System.out.println(REMAINING);
- // System.out.println(wsConfig.getGlobalStore());
}
|
5dc029274e41fdf5f761b0207e1dc96a1af2cb96
|
ReactiveX-RxJava
|
Perf Tests with JMH--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/perf/java/rx/jmh/Baseline.java b/rxjava-core/src/perf/java/rx/jmh/Baseline.java
new file mode 100644
index 0000000000..549118752f
--- /dev/null
+++ b/rxjava-core/src/perf/java/rx/jmh/Baseline.java
@@ -0,0 +1,35 @@
+package rx.jmh;
+
+import org.openjdk.jmh.annotations.GenerateMicroBenchmark;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.logic.BlackHole;
+
+import rx.functions.Func1;
+
+public class Baseline {
+
+ @GenerateMicroBenchmark
+ public void forLoopInvokingFunction(BlackHole bh, Input input) {
+ for (int value = 0; value < input.size; value++) {
+ bh.consume(IDENTITY_FUNCTION.call(value));
+ }
+ }
+
+ private static final Func1<Integer, Integer> IDENTITY_FUNCTION = new Func1<Integer, Integer>() {
+ @Override
+ public Integer call(Integer value) {
+ return value;
+ }
+ };
+
+ @State(Scope.Thread)
+ public static class Input {
+
+ @Param({ "1024", "1048576" })
+ public int size;
+
+ }
+
+}
diff --git a/rxjava-core/src/perf/java/rx/operators/OperatorMapPerf.java b/rxjava-core/src/perf/java/rx/operators/OperatorMapPerf.java
index 7ae07c68b1..70074b4b08 100644
--- a/rxjava-core/src/perf/java/rx/operators/OperatorMapPerf.java
+++ b/rxjava-core/src/perf/java/rx/operators/OperatorMapPerf.java
@@ -1,7 +1,5 @@
package rx.operators;
-import java.util.ArrayList;
-import java.util.Collection;
import java.util.concurrent.CountDownLatch;
import org.openjdk.jmh.annotations.GenerateMicroBenchmark;
@@ -21,14 +19,7 @@
public class OperatorMapPerf {
@GenerateMicroBenchmark
- public void measureBaseline(BlackHole bh, Input input) {
- for (Integer value : input.values) {
- bh.consume(IDENTITY_FUNCTION.call(value));
- }
- }
-
- @GenerateMicroBenchmark
- public void measureMap(Input input) throws InterruptedException {
+ public void mapIdentityFunction(Input input) throws InterruptedException {
input.observable.lift(MAP_OPERATOR).subscribe(input.observer);
input.awaitCompletion();
@@ -46,10 +37,9 @@ public Integer call(Integer value) {
@State(Scope.Thread)
public static class Input {
- @Param({"1", "1024", "1048576"})
+ @Param({ "1", "1024", "1048576" })
public int size;
- public Collection<Integer> values;
public Observable<Integer> observable;
public Observer<Integer> observer;
@@ -57,15 +47,10 @@ public static class Input {
@Setup
public void setup() {
- values = new ArrayList<Integer>();
- for(int i = 0; i < size; i ++) {
- values.add(i);
- }
-
observable = Observable.create(new OnSubscribe<Integer>() {
@Override
public void call(Subscriber<? super Integer> o) {
- for (Integer value : values) {
+ for (int value = 0; value < size; value++) {
if (o.isUnsubscribed())
return;
o.onNext(value);
diff --git a/rxjava-core/src/perf/java/rx/operators/OperatorSerializePerf.java b/rxjava-core/src/perf/java/rx/operators/OperatorSerializePerf.java
new file mode 100644
index 0000000000..a130c0711c
--- /dev/null
+++ b/rxjava-core/src/perf/java/rx/operators/OperatorSerializePerf.java
@@ -0,0 +1,86 @@
+package rx.operators;
+
+import java.util.concurrent.CountDownLatch;
+
+import org.openjdk.jmh.annotations.GenerateMicroBenchmark;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.logic.BlackHole;
+
+import rx.Observable;
+import rx.Observable.OnSubscribe;
+import rx.Observer;
+import rx.Subscriber;
+import rx.observers.TestSubscriber;
+
+public class OperatorSerializePerf {
+
+ @GenerateMicroBenchmark
+ public void noSerializationSingleThreaded(Input input) {
+ input.observable.subscribe(input.subscriber);
+ }
+
+ @GenerateMicroBenchmark
+ public void serializedSingleStream(Input input) {
+ input.observable.serialize().subscribe(input.subscriber);
+ }
+
+ @GenerateMicroBenchmark
+ public void synchronizedSingleStream(Input input) {
+ input.observable.synchronize().subscribe(input.subscriber);
+ }
+
+ @State(Scope.Thread)
+ public static class Input {
+
+ @Param({ "1024", "1048576" })
+ public int size;
+
+ public Observable<Integer> observable;
+ public TestSubscriber<Integer> subscriber;
+
+ private CountDownLatch latch;
+
+ @Setup
+ public void setup() {
+ observable = Observable.create(new OnSubscribe<Integer>() {
+ @Override
+ public void call(Subscriber<? super Integer> o) {
+ for (int value = 0; value < size; value++) {
+ if (o.isUnsubscribed())
+ return;
+ o.onNext(value);
+ }
+ o.onCompleted();
+ }
+ });
+
+ final BlackHole bh = new BlackHole();
+ latch = new CountDownLatch(1);
+
+ subscriber = new TestSubscriber<Integer>(new Observer<Integer>() {
+ @Override
+ public void onCompleted() {
+ latch.countDown();
+ }
+
+ @Override
+ public void onError(Throwable e) {
+ throw new RuntimeException(e);
+ }
+
+ @Override
+ public void onNext(Integer value) {
+ bh.consume(value);
+ }
+ });
+
+ }
+
+ public void awaitCompletion() throws InterruptedException {
+ latch.await();
+ }
+ }
+}
|
a768016779e3bdbddf310d2d861e74830a62d16c
|
elasticsearch
|
Allow to configure a common logger prefix using- `es.logger.prefix` system prop, closes -360.--
|
a
|
https://github.com/elastic/elasticsearch
|
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/common/logging/Loggers.java b/modules/elasticsearch/src/main/java/org/elasticsearch/common/logging/Loggers.java
index 74a5a3d518fe9..a19cac5adf1ea 100644
--- a/modules/elasticsearch/src/main/java/org/elasticsearch/common/logging/Loggers.java
+++ b/modules/elasticsearch/src/main/java/org/elasticsearch/common/logging/Loggers.java
@@ -40,6 +40,8 @@
*/
public class Loggers {
+ private final static String commonPrefix = System.getProperty("es.logger.prefix", "");
+
public static final String SPACE = " ";
private static boolean consoleLoggingEnabled = true;
@@ -152,6 +154,6 @@ private static String getLoggerName(String name) {
if (name.startsWith("org.elasticsearch.")) {
return name.substring("org.elasticsearch.".length());
}
- return name;
+ return commonPrefix + name;
}
}
|
1dc33f9f6d29c6b33de2023d4f2158e70a1c89aa
|
orientdb
|
UPDATE ADD now possible with subdocuments fields--
|
a
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java
index df9e1a06034..94deb108910 100755
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java
@@ -275,7 +275,13 @@ else if (returning.equalsIgnoreCase("AFTER"))
// IN ALL OTHER CASES USE A LIST
coll = new ArrayList<Object>();
- record.field(entry.getKey(), coll);
+ // containField's condition above does NOT check subdocument's fields so
+ Collection<Object> currColl = record.field(entry.getKey());
+ if (currColl==null)
+ record.field(entry.getKey(), coll);
+ else
+ coll = currColl;
+
} else {
fieldValue = record.field(entry.getKey());
|
d89b18613f26094eee45d664cc2a8e5fc9fcba16
|
spring-framework
|
Polish (minor)--
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-websocket/src/main/java/org/springframework/web/socket/sockjs/SockJsFrame.java b/spring-websocket/src/main/java/org/springframework/web/socket/sockjs/SockJsFrame.java
index 17233f7e0e9c..c1a52ef37dc9 100644
--- a/spring-websocket/src/main/java/org/springframework/web/socket/sockjs/SockJsFrame.java
+++ b/spring-websocket/src/main/java/org/springframework/web/socket/sockjs/SockJsFrame.java
@@ -78,24 +78,29 @@ public byte[] getContentBytes() {
return this.content.getBytes(Charset.forName("UTF-8"));
}
- public static String escapeCharacters(char[] chars) {
+ /**
+ * See "JSON Unicode Encoding" section of SockJS protocol.
+ */
+ public static String escapeCharacters(char[] characters) {
StringBuilder result = new StringBuilder();
- for (char ch : chars) {
- if (isSockJsEscapeCharacter(ch)) {
+ for (char c : characters) {
+ if (isSockJsEscapeCharacter(c)) {
result.append('\\').append('u');
- String hex = Integer.toHexString(ch).toLowerCase();
+ String hex = Integer.toHexString(c).toLowerCase();
for (int i = 0; i < (4 - hex.length()); i++) {
result.append('0');
}
result.append(hex);
}
else {
- result.append(ch);
+ result.append(c);
}
}
return result.toString();
}
+ // See `escapable_by_server` var in SockJS protocol (under "JSON Unicode Encoding")
+
private static boolean isSockJsEscapeCharacter(char ch) {
return (ch >= '\u0000' && ch <= '\u001F') || (ch >= '\u200C' && ch <= '\u200F')
|| (ch >= '\u2028' && ch <= '\u202F') || (ch >= '\u2060' && ch <= '\u206F')
|
ece368462243d7b4721029e31a261b450e026296
|
kotlin
|
Lazy receiver parameter descriptor: to avoid- eager computation of default types--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend/serialization/src/org/jetbrains/jet/descriptors/serialization/descriptors/DeserializedClassDescriptor.java b/compiler/frontend/serialization/src/org/jetbrains/jet/descriptors/serialization/descriptors/DeserializedClassDescriptor.java
index 510eb55f10e83..bab1998e33c47 100644
--- a/compiler/frontend/serialization/src/org/jetbrains/jet/descriptors/serialization/descriptors/DeserializedClassDescriptor.java
+++ b/compiler/frontend/serialization/src/org/jetbrains/jet/descriptors/serialization/descriptors/DeserializedClassDescriptor.java
@@ -21,14 +21,15 @@
import org.jetbrains.jet.descriptors.serialization.*;
import org.jetbrains.jet.lang.descriptors.*;
import org.jetbrains.jet.lang.descriptors.annotations.AnnotationDescriptor;
+import org.jetbrains.jet.lang.descriptors.impl.AbstractReceiverParameterDescriptor;
import org.jetbrains.jet.lang.descriptors.impl.ClassDescriptorBase;
-import org.jetbrains.jet.lang.descriptors.impl.ReceiverParameterDescriptorImpl;
import org.jetbrains.jet.lang.resolve.OverrideResolver;
import org.jetbrains.jet.lang.resolve.TraceUtil;
import org.jetbrains.jet.lang.resolve.lazy.storage.*;
import org.jetbrains.jet.lang.resolve.name.Name;
import org.jetbrains.jet.lang.resolve.scopes.JetScope;
import org.jetbrains.jet.lang.resolve.scopes.receivers.ClassReceiver;
+import org.jetbrains.jet.lang.resolve.scopes.receivers.ReceiverValue;
import org.jetbrains.jet.lang.types.JetType;
import org.jetbrains.jet.lang.types.TypeConstructor;
@@ -77,7 +78,7 @@ public DeserializedClassDescriptor(
this.containingDeclaration = containingDeclaration;
this.typeConstructor = new DeserializedClassTypeConstructor();
this.memberScope = new DeserializedClassMemberScope(this);
- this.thisAsReceiverParameter = new ReceiverParameterDescriptorImpl(this, getDefaultType(), new ClassReceiver(this));
+ this.thisAsReceiverParameter = new LazyClassReceiverParameterDescriptor();
this.name = nameResolver.getName(classProto.getName());
int flags = classProto.getFlags();
@@ -433,5 +434,27 @@ public Collection<ClassDescriptor> getAllDescriptors() {
return result;
}
}
+
+ private class LazyClassReceiverParameterDescriptor extends AbstractReceiverParameterDescriptor {
+ private final ClassReceiver classReceiver = new ClassReceiver(DeserializedClassDescriptor.this);
+
+ @NotNull
+ @Override
+ public JetType getType() {
+ return getDefaultType();
+ }
+
+ @NotNull
+ @Override
+ public ReceiverValue getValue() {
+ return classReceiver;
+ }
+
+ @NotNull
+ @Override
+ public DeclarationDescriptor getContainingDeclaration() {
+ return DeserializedClassDescriptor.this;
+ }
+ }
}
|
2764024351db5082ce5e8f1664a37d1629a56a69
|
spring-framework
|
JavaDoc on- AnnotationConfigApplicationContext-scan(String...)--
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.context/src/main/java/org/springframework/context/annotation/AnnotationConfigApplicationContext.java b/org.springframework.context/src/main/java/org/springframework/context/annotation/AnnotationConfigApplicationContext.java
index 0b0a9053c469..2427d6c174e9 100644
--- a/org.springframework.context/src/main/java/org/springframework/context/annotation/AnnotationConfigApplicationContext.java
+++ b/org.springframework.context/src/main/java/org/springframework/context/annotation/AnnotationConfigApplicationContext.java
@@ -112,7 +112,10 @@ public void register(Class<?>... annotatedClasses) {
/**
* Perform a scan within the specified base packages.
+ * Note that {@link AnnotationConfigApplicationContext#refresh()} must be
+ * called in order for the context to fully process the new class.
* @param basePackages the packages to check for annotated classes
+ * @see #refresh()
*/
public void scan(String... basePackages) {
this.scanner.scan(basePackages);
|
e9dee4fc76caaca231bf10728d4f82bc46581bc5
|
intellij-community
|
fixed PY-2674 Assignment can be replaced with- augmented assignmet breaks context--
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/python/src/com/jetbrains/python/inspections/PyAugmentAssignmentInspection.java b/python/src/com/jetbrains/python/inspections/PyAugmentAssignmentInspection.java
index 5a7383a1d72e8..24c3143a1ba4c 100644
--- a/python/src/com/jetbrains/python/inspections/PyAugmentAssignmentInspection.java
+++ b/python/src/com/jetbrains/python/inspections/PyAugmentAssignmentInspection.java
@@ -45,7 +45,7 @@ public void visitPyAssignmentStatement(final PyAssignmentStatement node) {
PyExpression rightExpression = expression.getRightExpression();
if (rightExpression != null) {
boolean changedParts = false;
- if (rightExpression.getText().equals(target.getText())) {
+ if (rightExpression.getText().equals(target.getText()) && leftExpression instanceof PyNumericLiteralExpression) {
PyExpression tmp = rightExpression;
rightExpression = leftExpression;
leftExpression = tmp;
|
d176607d729525905066b65eb763d466d5753763
|
restlet-framework-java
|
- When updating a file via the FILE- connector, some deletion issue could occur on Windows due to JVM- integration issues with the OS. A workaround to reduce this was to- invoke the garbage collection. Reported by Kevin Conaway.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet/src/org/restlet/engine/local/FileClientHelper.java b/modules/org.restlet/src/org/restlet/engine/local/FileClientHelper.java
index 875eb6d3f8..e710d7e508 100644
--- a/modules/org.restlet/src/org/restlet/engine/local/FileClientHelper.java
+++ b/modules/org.restlet/src/org/restlet/engine/local/FileClientHelper.java
@@ -339,11 +339,12 @@ protected void handleFilePut(Request request, Response response,
}
}
}
+
if (uniqueVariant != null) {
file = uniqueVariant;
} else {
if (!variantsList.isEmpty()) {
- // Negociated resource (several variants, but not the
+ // Negotiated resource (several variants, but not the
// right one).
// Check if the request could be completed or not.
// The request could be more precise
@@ -404,6 +405,7 @@ protected void handleFilePut(Request request, Response response,
.toString());
}
}
+
// Before putting the file representation, we check that all
// the extensions are known
if (!checkExtensionsConsistency(file, metadataService)) {
@@ -415,6 +417,7 @@ protected void handleFilePut(Request request, Response response,
} else {
File tmp = null;
boolean error = false;
+
if (file.exists()) {
// The PUT call is handled in two phases:
// 1- write a temporary file
@@ -535,6 +538,11 @@ protected void handleFilePut(Request request, Response response,
}
return;
}
+
+ // Calling the garbage collector helps to
+ // workaround deletion issues on Windows
+ System.gc();
+
// Then delete the existing file
if (tmp.exists() && file.delete()) {
// Finally move the temporary file to the
@@ -552,7 +560,7 @@ protected void handleFilePut(Request request, Response response,
// Many aspects of the behavior of the method
// "renameTo" are inherently platform-dependent:
// the rename operation might not be able to
- // move a file from one filesystem to another.
+ // move a file from one file system to another.
if (tmp.exists()) {
try {
final BufferedReader br = new BufferedReader(
|
c095c8f95cadbd5fac7952da6aaa4c13aae1274e
|
drools
|
refactor GAV to ReleaseId--
|
p
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/drools/cdi/KieCDIExtension.java b/drools-compiler/src/main/java/org/drools/cdi/KieCDIExtension.java
index bfac3dd44d2..f6c4df153f6 100644
--- a/drools-compiler/src/main/java/org/drools/cdi/KieCDIExtension.java
+++ b/drools-compiler/src/main/java/org/drools/cdi/KieCDIExtension.java
@@ -3,7 +3,7 @@
import org.drools.kproject.models.KieSessionModelImpl;
import org.kie.KieBase;
import org.kie.KieServices;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBaseModel;
import org.kie.builder.KieSessionModel;
import org.kie.builder.KieSessionModel.KieSessionType;
@@ -11,7 +11,7 @@
import org.kie.builder.impl.KieContainerImpl;
import org.kie.builder.impl.KieProject;
import org.kie.cdi.KBase;
-import org.kie.cdi.KGAV;
+import org.kie.cdi.KReleaseId;
import org.kie.cdi.KSession;
import org.kie.runtime.KieContainer;
import org.kie.runtime.KieSession;
@@ -52,7 +52,7 @@ public class KieCDIExtension
private Map<KieCDIEntry, KieCDIEntry> kBaseNames;
private Map<KieCDIEntry, KieCDIEntry> kSessionNames;
- private Map<GAV, KieContainer> gavs;
+ private Map<ReleaseId, KieContainer> gavs;
private Map<String, KieCDIEntry> named;
@@ -67,7 +67,7 @@ public KieCDIExtension() { }
public void init() {
KieServices ks = KieServices.Factory.get();
- gavs = new HashMap<GAV, KieContainer>();
+ gavs = new HashMap<ReleaseId, KieContainer>();
classpathKContainer = (KieContainerImpl) ks.getKieClasspathContainer(); //new KieContainerImpl( kProject, null );
named = new HashMap<String, KieCDIExtension.KieCDIEntry>();
}
@@ -89,13 +89,13 @@ public <Object> void processInjectionTarget(@Observes ProcessInjectionTarget<Obj
continue;
}
- KGAV kGAV = ip.getAnnotated().getAnnotation( KGAV.class );
- GAV gav = null;
- if ( kGAV != null ) {
- gav = ks.newGav( kGAV.groupId(),
- kGAV.artifactId(),
- kGAV.version() );
- gavs.put( gav,
+ KReleaseId KReleaseId = ip.getAnnotated().getAnnotation( KReleaseId.class );
+ ReleaseId releaseId = null;
+ if ( KReleaseId != null ) {
+ releaseId = ks.newReleaseId(KReleaseId.groupId(),
+ KReleaseId.artifactId(),
+ KReleaseId.version());
+ gavs.put(releaseId,
null );
}
@@ -108,22 +108,22 @@ public <Object> void processInjectionTarget(@Observes ProcessInjectionTarget<Obj
Class< ? extends Annotation> scope = ApplicationScoped.class;
if ( kBase != null ) {
- addKBaseInjectionPoint(ip, kBase, namedStr, scope, gav);
+ addKBaseInjectionPoint(ip, kBase, namedStr, scope, releaseId);
} else if ( kSession != null ) {
- addKSessionInjectionPoint(ip, kSession, namedStr, scope, gav);
+ addKSessionInjectionPoint(ip, kSession, namedStr, scope, releaseId);
}
}
}
}
- public void addKBaseInjectionPoint(InjectionPoint ip, KBase kBase, String namedStr, Class< ? extends Annotation> scope, GAV gav) {
+ public void addKBaseInjectionPoint(InjectionPoint ip, KBase kBase, String namedStr, Class< ? extends Annotation> scope, ReleaseId releaseId) {
if ( kBaseNames == null ) {
kBaseNames = new HashMap<KieCDIEntry, KieCDIEntry>();
}
KieCDIEntry newEntry = new KieCDIEntry( kBase.value(),
scope,
- gav,
+ releaseId,
namedStr );
KieCDIEntry existingEntry = kBaseNames.remove( newEntry );
@@ -154,14 +154,14 @@ public void addKBaseInjectionPoint(InjectionPoint ip, KBase kBase, String namedS
}
}
- public void addKSessionInjectionPoint(InjectionPoint ip, KSession kSession, String namedStr, Class< ? extends Annotation> scope, GAV gav) {
+ public void addKSessionInjectionPoint(InjectionPoint ip, KSession kSession, String namedStr, Class< ? extends Annotation> scope, ReleaseId releaseId) {
if ( kSessionNames == null ) {
kSessionNames = new HashMap<KieCDIEntry, KieCDIEntry>();
}
KieCDIEntry newEntry = new KieCDIEntry( kSession.value(),
scope,
- gav,
+ releaseId,
namedStr );
KieCDIEntry existingEntry = kSessionNames.remove( newEntry );
@@ -200,16 +200,16 @@ public void afterBeanDiscovery(@Observes AfterBeanDiscovery abd,
// to array, so we don't mutate that which we are iterating over
if ( !gavs.isEmpty() ) {
- for ( GAV gav : gavs.keySet().toArray( new GAV[gavs.size()] ) ) {
- KieContainer kContainer = ks.newKieContainer(gav);
+ for ( ReleaseId releaseId : gavs.keySet().toArray( new ReleaseId[gavs.size()] ) ) {
+ KieContainer kContainer = ks.newKieContainer(releaseId);
if ( kContainer == null ) {
- log.error( "Unable to retrieve KieContainer for GAV {}",
- gav.toString() );
+ log.error( "Unable to retrieve KieContainer for ReleaseId {}",
+ releaseId.toString() );
} else {
- log.debug( "KieContainer retrieved for GAV {}",
- gav.toString() );
+ log.debug( "KieContainer retrieved for ReleaseId {}",
+ releaseId.toString() );
}
- gavs.put( gav,
+ gavs.put(releaseId,
kContainer );
}
}
@@ -234,14 +234,14 @@ public void afterBeanDiscovery(@Observes AfterBeanDiscovery abd,
public void addKBaseBean(AfterBeanDiscovery abd,
KieCDIEntry entry) {
- GAV gav = entry.getkGAV();
+ ReleaseId releaseId = entry.getkGAV();
KieContainerImpl kieContainer = classpathKContainer; // default to classpath, but allow it to be overriden
- if ( gav != null ) {
- kieContainer = (KieContainerImpl) gavs.get( gav );
+ if ( releaseId != null ) {
+ kieContainer = (KieContainerImpl) gavs.get(releaseId);
if ( kieContainer == null ) {
- log.error( "Unable to create KBase({}), could not retrieve KieContainer for GAV {}",
+ log.error( "Unable to create KBase({}), could not retrieve KieContainer for ReleaseId {}",
entry.getKieTypeName(),
- gav.toString() );
+ releaseId.toString() );
return;
}
}
@@ -282,14 +282,14 @@ public void addKBaseBean(AfterBeanDiscovery abd,
public void addKSessionBean(AfterBeanDiscovery abd,
KieCDIEntry entry) {
- GAV gav = entry.getkGAV();
+ ReleaseId releaseId = entry.getkGAV();
KieContainerImpl kieContainer = classpathKContainer; // default to classpath, but allow it to be overriden
- if ( gav != null ) {
- kieContainer = (KieContainerImpl) gavs.get( gav );
+ if ( releaseId != null ) {
+ kieContainer = (KieContainerImpl) gavs.get(releaseId);
if ( kieContainer == null ) {
- log.error( "Unable to create KSession({}), could not retrieve KieContainer for GAV {}",
+ log.error( "Unable to create KSession({}), could not retrieve KieContainer for ReleaseId {}",
entry.getKieTypeName(),
- gav.toString() );
+ releaseId.toString() );
return;
}
}
@@ -397,13 +397,13 @@ public String toString() {
}
} );
}
- if ( kContainer.getGAV() != null ) {
- final String groupId = kContainer.getGAV().getGroupId();
- final String artifactId = kContainer.getGAV().getArtifactId();
- final String version = kContainer.getGAV().getVersion();
- set.add( new KGAV() {
+ if ( kContainer.getReleaseId() != null ) {
+ final String groupId = kContainer.getReleaseId().getGroupId();
+ final String artifactId = kContainer.getReleaseId().getArtifactId();
+ final String version = kContainer.getReleaseId().getVersion();
+ set.add( new KReleaseId() {
public Class< ? extends Annotation> annotationType() {
- return KGAV.class;
+ return KReleaseId.class;
}
public String groupId() {
@@ -419,7 +419,7 @@ public String version() {
}
public String toString() {
- return "KGAV[groupId=" + groupId + " artifactId" + artifactId + " version=" + version + "]";
+ return "KReleaseId[groupId=" + groupId + " artifactId" + artifactId + " version=" + version + "]";
}
} );
}
@@ -536,13 +536,13 @@ public String toString() {
}
} );
}
- if ( kContainer.getGAV() != null ) {
- final String groupId = kContainer.getGAV().getGroupId();
- final String artifactId = kContainer.getGAV().getArtifactId();
- final String version = kContainer.getGAV().getVersion();
- set.add( new KGAV() {
+ if ( kContainer.getReleaseId() != null ) {
+ final String groupId = kContainer.getReleaseId().getGroupId();
+ final String artifactId = kContainer.getReleaseId().getArtifactId();
+ final String version = kContainer.getReleaseId().getVersion();
+ set.add( new KReleaseId() {
public Class< ? extends Annotation> annotationType() {
- return KGAV.class;
+ return KReleaseId.class;
}
@Override
@@ -561,7 +561,7 @@ public String version() {
}
public String toString() {
- return "KGAV[groupId=" + groupId + " artifactId" + artifactId + " version=" + version + "]";
+ return "KReleaseId[groupId=" + groupId + " artifactId" + artifactId + " version=" + version + "]";
}
} );
}
@@ -670,13 +670,13 @@ public String toString() {
}
} );
}
- if ( kContainer.getGAV() != null ) {
- final String groupId = kContainer.getGAV().getGroupId();
- final String artifactId = kContainer.getGAV().getArtifactId();
- final String version = kContainer.getGAV().getVersion();
- set.add( new KGAV() {
+ if ( kContainer.getReleaseId() != null ) {
+ final String groupId = kContainer.getReleaseId().getGroupId();
+ final String artifactId = kContainer.getReleaseId().getArtifactId();
+ final String version = kContainer.getReleaseId().getVersion();
+ set.add( new KReleaseId() {
public Class< ? extends Annotation> annotationType() {
- return KGAV.class;
+ return KReleaseId.class;
}
@Override
@@ -695,7 +695,7 @@ public String version() {
}
public String toString() {
- return "KGAV[groupId=" + groupId + " artifactId" + artifactId + " version=" + version + "]";
+ return "KReleaseId[groupId=" + groupId + " artifactId" + artifactId + " version=" + version + "]";
}
} );
}
@@ -752,18 +752,18 @@ public boolean isNullable() {
public static class KieCDIEntry {
private String kieTypeName;
private Class< ? extends Annotation> scope;
- private GAV kGav;
+ private ReleaseId kReleaseId;
private String named;
private Set<InjectionPoint> injectionPoints;
public KieCDIEntry(String kieTypeName,
Class< ? extends Annotation> scope,
- GAV gav,
+ ReleaseId releaseId,
String named) {
super();
this.kieTypeName = kieTypeName;
this.scope = scope;
- this.kGav = gav;
+ this.kReleaseId = releaseId;
this.named = named;
this.injectionPoints = new HashSet<InjectionPoint>();
}
@@ -797,12 +797,12 @@ public void setScope(Class< ? extends Annotation> scope) {
return scope;
}
- public GAV getkGAV() {
- return kGav;
+ public ReleaseId getkGAV() {
+ return kReleaseId;
}
- public void setkGAV(GAV kGav) {
- this.kGav = kGav;
+ public void setkGAV(ReleaseId kReleaseId) {
+ this.kReleaseId = kReleaseId;
}
/**
@@ -833,7 +833,7 @@ public void setInjectionPoints(Set<InjectionPoint> injectionPoints) {
public int hashCode() {
final int prime = 31;
int result = 1;
- result = prime * result + ((kGav == null) ? 0 : kGav.hashCode());
+ result = prime * result + ((kReleaseId == null) ? 0 : kReleaseId.hashCode());
result = prime * result + ((kieTypeName == null) ? 0 : kieTypeName.hashCode());
result = prime * result + ((named == null) ? 0 : named.hashCode());
result = prime * result + ((scope == null) ? 0 : scope.hashCode());
@@ -846,9 +846,9 @@ public boolean equals(java.lang.Object obj) {
if ( obj == null ) return false;
if ( getClass() != obj.getClass() ) return false;
KieCDIEntry other = (KieCDIEntry) obj;
- if ( kGav == null ) {
- if ( other.kGav != null ) return false;
- } else if ( !kGav.equals( other.kGav ) ) return false;
+ if ( kReleaseId == null ) {
+ if ( other.kReleaseId != null ) return false;
+ } else if ( !kReleaseId.equals( other.kReleaseId) ) return false;
if ( kieTypeName == null ) {
if ( other.kieTypeName != null ) return false;
} else if ( !kieTypeName.equals( other.kieTypeName ) ) return false;
@@ -863,7 +863,7 @@ public boolean equals(java.lang.Object obj) {
@Override
public String toString() {
- return "KieCDIEntry [kieTypeName=" + kieTypeName + ", scope=" + scope + ", kGav=" + kGav + ", named=" + named + "]";
+ return "KieCDIEntry [kieTypeName=" + kieTypeName + ", scope=" + scope + ", kReleaseId=" + kReleaseId + ", named=" + named + "]";
}
}
diff --git a/drools-compiler/src/main/java/org/drools/kproject/GAVImpl.java b/drools-compiler/src/main/java/org/drools/kproject/ReleaseIdImpl.java
similarity index 82%
rename from drools-compiler/src/main/java/org/drools/kproject/GAVImpl.java
rename to drools-compiler/src/main/java/org/drools/kproject/ReleaseIdImpl.java
index 8ee485e90e0..b60516c089d 100644
--- a/drools-compiler/src/main/java/org/drools/kproject/GAVImpl.java
+++ b/drools-compiler/src/main/java/org/drools/kproject/ReleaseIdImpl.java
@@ -2,20 +2,19 @@
import java.io.IOException;
import java.io.StringReader;
-import java.nio.charset.MalformedInputException;
import java.util.Properties;
import org.drools.core.util.StringUtils;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
-public class GAVImpl implements GAV {
+public class ReleaseIdImpl implements ReleaseId {
private final String groupId;
private final String artifactId;
private final String version;
- public GAVImpl(String groupId,
- String artifactId,
- String version) {
+ public ReleaseIdImpl(String groupId,
+ String artifactId,
+ String version) {
this.groupId = groupId;
this.artifactId = artifactId;
this.version = version;
@@ -50,23 +49,23 @@ public String getPomPropertiesPath() {
return "META-INF/maven/" + groupId + "/" + artifactId + "/pom.properties";
}
- public static GAV fromPropertiesString(String string) {
+ public static ReleaseId fromPropertiesString(String string) {
Properties props = new Properties();
- GAV gav = null;
+ ReleaseId releaseId = null;
try {
props.load( new StringReader( string ) );
String groupId = props.getProperty( "groupId" );
String artifactId = props.getProperty( "artifactId" );
String version = props.getProperty( "version" );
if ( StringUtils.isEmpty( groupId ) || StringUtils.isEmpty( artifactId ) || StringUtils.isEmpty( version ) ) {
- throw new RuntimeException("pom.properties exists but GAV content is malformed\n" + string);
+ throw new RuntimeException("pom.properties exists but ReleaseId content is malformed\n" + string);
}
- gav = new GAVImpl( groupId, artifactId, version );
+ releaseId = new ReleaseIdImpl( groupId, artifactId, version );
} catch ( IOException e ) {
throw new RuntimeException( "pom.properties was malformed\n" + string, e );
}
- return gav;
+ return releaseId;
}
@Override
@@ -74,7 +73,7 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- GAVImpl that = (GAVImpl) o;
+ ReleaseIdImpl that = (ReleaseIdImpl) o;
if (artifactId != null ? !artifactId.equals(that.artifactId) : that.artifactId != null) return false;
if (groupId != null ? !groupId.equals(that.groupId) : that.groupId != null) return false;
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieModule.java b/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieModule.java
index e02b3bf46ce..277cdb8d86b 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieModule.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieModule.java
@@ -8,7 +8,7 @@
import org.kie.KieBaseConfiguration;
import org.kie.KnowledgeBaseFactory;
import org.kie.builder.CompositeKnowledgeBuilder;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBaseModel;
import org.kie.builder.KieModuleModel;
import org.kie.builder.KnowledgeBuilder;
@@ -45,15 +45,15 @@ public abstract class AbstractKieModule
private final Map<String, Results> resultsCache = new HashMap<String, Results>();
- protected final GAV gav;
+ protected final ReleaseId releaseId;
private final KieModuleModel kModuleModel;
- private Map<GAV, InternalKieModule> dependencies;
+ private Map<ReleaseId, InternalKieModule> dependencies;
- public AbstractKieModule(GAV gav, KieModuleModel kModuleModel) {
- this.gav = gav;
+ public AbstractKieModule(ReleaseId releaseId, KieModuleModel kModuleModel) {
+ this.releaseId = releaseId;
this.kModuleModel = kModuleModel;
}
@@ -63,14 +63,14 @@ public KieModuleModel getKieModuleModel() {
// public void index() {
// if ( kieModules == null ) {
-// kieModules = new HashMap<GAV, InternalKieModule>();
+// kieModules = new HashMap<ReleaseId, InternalKieModule>();
// kieModules.putAll( dependencies );
-// kieModules.put( gav, this );
+// kieModules.put( releaseId, this );
// indexParts( kieModules, kBaseModels, kSessionModels, kJarFromKBaseName );
// }
// }
//
-// public Map<GAV, InternalKieModule> getKieModules() {
+// public Map<ReleaseId, InternalKieModule> getKieModules() {
// if ( kieModules == null ) {
// index();
// }
@@ -79,9 +79,9 @@ public KieModuleModel getKieModuleModel() {
// public void verify(Messages messages) {
// if ( kieModules == null ) {
-// kieModules = new HashMap<GAV, InternalKieModule>();
+// kieModules = new HashMap<ReleaseId, InternalKieModule>();
// kieModules.putAll( dependencies );
-// kieModules.put( gav, this );
+// kieModules.put( releaseId, this );
// indexParts( kieModules, kBaseModels, kSessionModels, kJarFromKBaseName );
//
// for ( KieBaseModel model : kBaseModels.values() ) {
@@ -90,19 +90,19 @@ public KieModuleModel getKieModuleModel() {
// }
// }
- public Map<GAV, InternalKieModule> getDependencies() {
- return dependencies == null ? Collections.<GAV, InternalKieModule>emptyMap() : dependencies;
+ public Map<ReleaseId, InternalKieModule> getDependencies() {
+ return dependencies == null ? Collections.<ReleaseId, InternalKieModule>emptyMap() : dependencies;
}
public void addDependency(InternalKieModule dependency) {
if (dependencies == null) {
- dependencies = new HashMap<GAV, InternalKieModule>();
+ dependencies = new HashMap<ReleaseId, InternalKieModule>();
}
- dependencies.put(dependency.getGAV(), dependency);
+ dependencies.put(dependency.getReleaseId(), dependency);
}
- public GAV getGAV() {
- return gav;
+ public ReleaseId getReleaseId() {
+ return releaseId;
}
public Map<String, Collection<KnowledgePackage>> getKnowledgePackageCache() {
@@ -175,7 +175,7 @@ public static KieBase createKieBase(KieBaseModelImpl kBaseModel,
InternalKieModule kModule = indexedParts.getKieModuleForKBase( kBaseModel.getName() );
- Collection<KnowledgePackage> pkgs = kModule.getKnowledgePackageCache().get( kBaseModel.getName() );
+ Collection<KnowledgePackage> pkgs = kModule.getKnowledgePackageCache().get(kBaseModel.getName());
if ( pkgs == null ) {
KnowledgeBuilder kbuilder = buildKnowledgePackages(kBaseModel, indexedParts, messages);
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieProject.java b/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieProject.java
index 5678b40f357..cc5958803d5 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieProject.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieProject.java
@@ -2,7 +2,7 @@
import org.drools.kproject.models.KieBaseModelImpl;
import org.drools.kproject.models.KieSessionModelImpl;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBaseModel;
import org.kie.builder.KieModuleModel;
import org.kie.builder.KieSessionModel;
@@ -34,9 +34,9 @@ public ResultsImpl verify() {
public void verify(ResultsImpl messages) {
for ( KieBaseModel model : kBaseModels.values() ) {
- AbstractKieModule.buildKnowledgePackages( (KieBaseModelImpl) model,
- this,
- messages );
+ AbstractKieModule.buildKnowledgePackages((KieBaseModelImpl) model,
+ this,
+ messages);
}
}
@@ -60,7 +60,7 @@ public KieSessionModel getKieSessionModel(String kSessionName) {
return kSessionModels.get( kSessionName );
}
- protected void indexParts(Map<GAV, InternalKieModule> kieModules,
+ protected void indexParts(Map<ReleaseId, InternalKieModule> kieModules,
Map<String, InternalKieModule> kJarFromKBaseName) {
for ( InternalKieModule kJar : kieModules.values() ) {
KieModuleModel kieProject = kJar.getKieModuleModel();
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/ClasspathKieProject.java b/drools-compiler/src/main/java/org/kie/builder/impl/ClasspathKieProject.java
index f626d905742..24ded685d19 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/ClasspathKieProject.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/ClasspathKieProject.java
@@ -1,12 +1,12 @@
package org.kie.builder.impl;
import org.drools.core.util.StringUtils;
-import org.drools.kproject.GAVImpl;
+import org.drools.kproject.ReleaseIdImpl;
import org.drools.kproject.models.KieModuleModelImpl;
import org.drools.xml.MinimalPomParser;
import org.drools.xml.PomModel;
import org.kie.KieServices;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieModuleModel;
import org.kie.builder.KieRepository;
import org.kie.internal.utils.ClassLoaderUtil;
@@ -38,7 +38,7 @@ public class ClasspathKieProject extends AbstractKieProject {
private static final Logger log = LoggerFactory.getLogger( ClasspathKieProject.class );
- private Map<GAV, InternalKieModule> kieModules = new HashMap<GAV, InternalKieModule>();
+ private Map<ReleaseId, InternalKieModule> kieModules = new HashMap<ReleaseId, InternalKieModule>();
private Map<String, InternalKieModule> kJarFromKBaseName = new HashMap<String, InternalKieModule>();
@@ -60,7 +60,7 @@ public void init() {
indexParts(kieModules, kJarFromKBaseName);
}
- public GAV getGAV() {
+ public ReleaseId getGAV() {
return null;
}
@@ -82,10 +82,10 @@ public void discoverKieModules() {
String fixedURL = fixURLFromKProjectPath( url );
InternalKieModule kModule = fetchKModule(url, fixedURL);
- GAV gav = kModule.getGAV();
- kieModules.put(gav, kModule);
+ ReleaseId releaseId = kModule.getReleaseId();
+ kieModules.put(releaseId, kModule);
- log.debug( "Discovered classpath module " + gav.toExternalForm() );
+ log.debug( "Discovered classpath module " + releaseId.toExternalForm() );
kr.addKieModule(kModule);
@@ -103,7 +103,7 @@ public static InternalKieModule fetchKModule(URL url, String fixedURL) {
String pomProperties = getPomProperties( fixedURL );
- GAV gav = GAVImpl.fromPropertiesString( pomProperties );
+ ReleaseId releaseId = ReleaseIdImpl.fromPropertiesString(pomProperties);
String rootPath = fixedURL;
if ( rootPath.lastIndexOf( ':' ) > 0 ) {
@@ -113,11 +113,11 @@ public static InternalKieModule fetchKModule(URL url, String fixedURL) {
InternalKieModule kJar;
File file = new File( rootPath );
if ( fixedURL.endsWith( ".jar" ) ) {
- kJar = new ZipKieModule( gav,
+ kJar = new ZipKieModule(releaseId,
kieProject,
file );
} else if ( file.isDirectory() ) {
- kJar = new FileKieModule( gav,
+ kJar = new FileKieModule(releaseId,
kieProject,
file );
} else {
@@ -206,9 +206,9 @@ public static String getPomProperties(String urlPathToAdd) {
KieBuilderImpl.validatePomModel( pomModel ); // throws an exception if invalid
- GAVImpl gav = ( GAVImpl ) KieServices.Factory.get().newGav( pomModel.getGroupId(),
- pomModel.getArtifactId(),
- pomModel.getVersion() );
+ ReleaseIdImpl gav = (ReleaseIdImpl) KieServices.Factory.get().newReleaseId(pomModel.getGroupId(),
+ pomModel.getArtifactId(),
+ pomModel.getVersion());
String str = KieBuilderImpl.generatePomProperties( gav );
log.info( "Recursed up folders, found and used pom.xml " + file );
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/FileKieModule.java b/drools-compiler/src/main/java/org/kie/builder/impl/FileKieModule.java
index 966766ddf8c..d5cdd26f53a 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/FileKieModule.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/FileKieModule.java
@@ -8,16 +8,16 @@
import java.util.Collection;
import org.drools.core.util.IoUtils;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieModuleModel;
public class FileKieModule extends AbstractKieModule implements InternalKieModule {
private final File file;
- public FileKieModule(GAV gav,
+ public FileKieModule(ReleaseId releaseId,
KieModuleModel kieProject,
File file) {
- super( gav, kieProject );
+ super(releaseId, kieProject );
this.file = file;
}
@@ -59,7 +59,7 @@ public byte[] getBytes() {
}
public String toString() {
- return "FileKieModule[ GAV=" + getGAV() + "file=" + file + "]";
+ return "FileKieModule[ ReleaseId=" + getReleaseId() + "file=" + file + "]";
}
}
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/InternalKieModule.java b/drools-compiler/src/main/java/org/kie/builder/impl/InternalKieModule.java
index 75ad672ce9e..451b8fce268 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/InternalKieModule.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/InternalKieModule.java
@@ -1,6 +1,6 @@
package org.kie.builder.impl;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieModule;
import org.kie.builder.KieModuleModel;
import org.kie.builder.Results;
@@ -20,7 +20,7 @@ public interface InternalKieModule extends KieModule {
byte[] getBytes( );
- Map<GAV, InternalKieModule> getDependencies();
+ Map<ReleaseId, InternalKieModule> getDependencies();
void addDependency(InternalKieModule dependency);
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/InternalKieScanner.java b/drools-compiler/src/main/java/org/kie/builder/impl/InternalKieScanner.java
index 0c2c60bafc1..03cbc10c998 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/InternalKieScanner.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/InternalKieScanner.java
@@ -1,6 +1,6 @@
package org.kie.builder.impl;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieModule;
import org.kie.builder.KieScanner;
import org.kie.runtime.KieContainer;
@@ -9,5 +9,5 @@ public interface InternalKieScanner extends KieScanner {
void setKieContainer(KieContainer kieContainer);
- KieModule loadArtifact(GAV gav);
+ KieModule loadArtifact(ReleaseId releaseId);
}
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieBuilderImpl.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieBuilderImpl.java
index b889c2b983d..1488859dbce 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieBuilderImpl.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieBuilderImpl.java
@@ -8,14 +8,14 @@
import org.drools.commons.jci.readers.ResourceReader;
import org.drools.compiler.io.memory.MemoryFileSystem;
import org.drools.core.util.StringUtils;
-import org.drools.kproject.GAVImpl;
+import org.drools.kproject.ReleaseIdImpl;
import org.drools.kproject.models.KieModuleModelImpl;
import org.drools.xml.MinimalPomParser;
import org.drools.xml.PomModel;
import org.kie.KieBaseConfiguration;
import org.kie.KieServices;
import org.kie.KnowledgeBaseFactory;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBaseModel;
import org.kie.builder.KieBuilder;
import org.kie.builder.KieFileSystem;
@@ -53,7 +53,7 @@ public class KieBuilderImpl
private PomModel pomModel;
private byte[] pomXml;
- private GAV gav;
+ private ReleaseId releaseId;
private byte[] kModuleModelXml;
private KieModuleModel kModuleModel;
@@ -91,7 +91,7 @@ private void init() {
results = new ResultsImpl();
- // if pomXML is null it will generate a default, using default GAV
+ // if pomXML is null it will generate a default, using default ReleaseId
// if pomXml is invalid, it assign pomModel to null
buildPomModel();
@@ -100,24 +100,24 @@ private void init() {
buildKieModuleModel();
if ( pomModel != null ) {
- // creates GAV from build pom
- // If the pom was generated, it will be the same as teh default GAV
- gav = ks.newGav( pomModel.getGroupId(),
- pomModel.getArtifactId(),
- pomModel.getVersion() );
+ // creates ReleaseId from build pom
+ // If the pom was generated, it will be the same as teh default ReleaseId
+ releaseId = ks.newReleaseId(pomModel.getGroupId(),
+ pomModel.getArtifactId(),
+ pomModel.getVersion());
}
}
public KieBuilder buildAll() {
- // gav and kModule will be null if a provided pom.xml or kmodule.xml is invalid
- if ( !isBuilt() && gav != null && kModuleModel != null ) {
+ // releaseId and kModule will be null if a provided pom.xml or kmodule.xml is invalid
+ if ( !isBuilt() && releaseId != null && kModuleModel != null ) {
trgMfs = new MemoryFileSystem();
writePomAndKModule();
compileJavaClasses();
addKBasesFilesToTrg();
- kModule = new MemoryKieModule( gav,
+ kModule = new MemoryKieModule(releaseId,
kModuleModel,
trgMfs );
@@ -282,7 +282,7 @@ public void buildPomModel() {
public static void validatePomModel(PomModel pomModel) {
if ( StringUtils.isEmpty( pomModel.getGroupId() ) || StringUtils.isEmpty( pomModel.getArtifactId() ) || StringUtils.isEmpty( pomModel.getVersion() ) ) {
- throw new RuntimeException( "Maven pom.properties exists but GAV content is malformed" );
+ throw new RuntimeException( "Maven pom.properties exists but ReleaseId content is malformed" );
}
}
@@ -290,8 +290,8 @@ public static byte[] getOrGeneratePomXml(ResourceReader mfs) {
if ( mfs.isAvailable( "pom.xml" ) ) {
return mfs.getBytes( "pom.xml" );
} else {
- // There is no pom.xml, and thus no GAV, so generate a pom.xml from the global detault.
- return generatePomXml( KieServices.Factory.get().getRepository().getDefaultGAV() ).getBytes();
+ // There is no pom.xml, and thus no ReleaseId, so generate a pom.xml from the global detault.
+ return generatePomXml( KieServices.Factory.get().getRepository().getDefaultReleaseId() ).getBytes();
}
}
@@ -299,12 +299,12 @@ public void writePomAndKModule() {
addMetaInfBuilder();
if ( pomXml != null ) {
- GAVImpl g = (GAVImpl) gav;
+ ReleaseIdImpl g = (ReleaseIdImpl) releaseId;
trgMfs.write( g.getPomXmlPath(),
pomXml,
true );
trgMfs.write( g.getPomPropertiesPath(),
- generatePomProperties( gav ).getBytes(),
+ generatePomProperties(releaseId).getBytes(),
true );
}
@@ -316,22 +316,22 @@ public void writePomAndKModule() {
}
}
- public static String generatePomXml(GAV gav) {
+ public static String generatePomXml(ReleaseId releaseId) {
StringBuilder sBuilder = new StringBuilder();
sBuilder.append( "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" \n" );
sBuilder.append( " xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\"> \n" );
sBuilder.append( " <modelVersion>4.0.0</modelVersion> \n" );
sBuilder.append( " <groupId>" );
- sBuilder.append( gav.getGroupId() );
+ sBuilder.append( releaseId.getGroupId() );
sBuilder.append( "</groupId> \n" );
sBuilder.append( " <artifactId>" );
- sBuilder.append( gav.getArtifactId() );
+ sBuilder.append( releaseId.getArtifactId() );
sBuilder.append( "</artifactId> \n" );
sBuilder.append( " <version>" );
- sBuilder.append( gav.getVersion() );
+ sBuilder.append( releaseId.getVersion() );
sBuilder.append( "</version> \n" );
sBuilder.append( " <packaging>jar</packaging> \n" );
@@ -342,18 +342,18 @@ public static String generatePomXml(GAV gav) {
return sBuilder.toString();
}
- public static String generatePomProperties(GAV gav) {
+ public static String generatePomProperties(ReleaseId releaseId) {
StringBuilder sBuilder = new StringBuilder();
sBuilder.append( "groupId=" );
- sBuilder.append( gav.getGroupId() );
+ sBuilder.append( releaseId.getGroupId() );
sBuilder.append( "\n" );
sBuilder.append( "artifactId=" );
- sBuilder.append( gav.getArtifactId() );
+ sBuilder.append( releaseId.getArtifactId() );
sBuilder.append( "\n" );
sBuilder.append( "version=" );
- sBuilder.append( gav.getVersion() );
+ sBuilder.append( releaseId.getVersion() );
sBuilder.append( "\n" );
return sBuilder.toString();
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieContainerImpl.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieContainerImpl.java
index de9bb986921..7694dfe0d61 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieContainerImpl.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieContainerImpl.java
@@ -4,7 +4,7 @@
import org.drools.kproject.models.KieSessionModelImpl;
import org.kie.KieBase;
import org.kie.KnowledgeBaseFactory;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBaseModel;
import org.kie.builder.KieModule;
import org.kie.builder.KieRepository;
@@ -43,13 +43,13 @@ public KieContainerImpl(KieProject kProject,
kProject.init();
}
- public GAV getGAV() {
+ public ReleaseId getReleaseId() {
return kProject.getGAV();
}
- public void updateToVersion(GAV gav) {
+ public void updateToVersion(ReleaseId releaseId) {
kBases.clear();
- this.kProject = new KieModuleKieProject( (InternalKieModule)kr.getKieModule(gav), kr );
+ this.kProject = new KieModuleKieProject( (InternalKieModule)kr.getKieModule(releaseId), kr );
this.kProject.init();
}
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieFileSystemImpl.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieFileSystemImpl.java
index 12c8728b358..ac2a0fa252a 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieFileSystemImpl.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieFileSystemImpl.java
@@ -1,13 +1,11 @@
package org.kie.builder.impl;
import org.drools.compiler.io.memory.MemoryFileSystem;
-import org.drools.io.internal.InternalResource;
import org.drools.kproject.models.KieModuleModelImpl;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieFileSystem;
import org.kie.io.Resource;
import org.kie.io.ResourceConfiguration;
-import org.kie.io.ResourceType;
import org.kie.io.ResourceTypeImpl;
import java.io.ByteArrayOutputStream;
@@ -87,8 +85,8 @@ public MemoryFileSystem asMemoryFileSystem() {
return mfs;
}
- public KieFileSystem generateAndWritePomXML(GAV gav) {
- write("pom.xml", KieBuilderImpl.generatePomXml( gav ) );
+ public KieFileSystem generateAndWritePomXML(ReleaseId releaseId) {
+ write("pom.xml", KieBuilderImpl.generatePomXml(releaseId) );
return this;
}
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieModuleKieProject.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieModuleKieProject.java
index 4023e166509..0f62cb64156 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieModuleKieProject.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieModuleKieProject.java
@@ -1,7 +1,7 @@
package org.kie.builder.impl;
import org.drools.core.util.ClassUtils;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieRepository;
import org.kie.internal.utils.ClassLoaderUtil;
import org.kie.internal.utils.CompositeClassLoader;
@@ -21,7 +21,7 @@ public class KieModuleKieProject extends AbstractKieProject {
private static final Logger log = LoggerFactory.getLogger( KieModuleKieProject.class );
- private Map<GAV, InternalKieModule> kieModules;
+ private Map<ReleaseId, InternalKieModule> kieModules;
private final Map<String, InternalKieModule> kJarFromKBaseName = new HashMap<String, InternalKieModule>();
@@ -38,9 +38,9 @@ public KieModuleKieProject(InternalKieModule kieModule,
public void init() {
if ( kieModules == null ) {
- kieModules = new HashMap<GAV, InternalKieModule>();
+ kieModules = new HashMap<ReleaseId, InternalKieModule>();
kieModules.putAll( kieModule.getDependencies() );
- kieModules.put( kieModule.getGAV(),
+ kieModules.put( kieModule.getReleaseId(),
kieModule );
indexParts( kieModules, kJarFromKBaseName );
initClassLaoder();
@@ -63,16 +63,16 @@ public void initClassLaoder() {
}
}
- public GAV getGAV() {
- return kieModule.getGAV();
+ public ReleaseId getGAV() {
+ return kieModule.getReleaseId();
}
public InternalKieModule getKieModuleForKBase(String kBaseName) {
- return this.kJarFromKBaseName.get( kBaseName );
+ return this.kJarFromKBaseName.get(kBaseName);
}
public boolean kieBaseExists(String kBaseName) {
- return kBaseModels.containsKey( kBaseName );
+ return kBaseModels.containsKey(kBaseName);
}
@Override
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieProject.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieProject.java
index e65d5e4e262..ffa3828c537 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieProject.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieProject.java
@@ -1,13 +1,13 @@
package org.kie.builder.impl;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBaseModel;
import org.kie.builder.KieSessionModel;
import org.kie.internal.utils.CompositeClassLoader;
public interface KieProject {
- GAV getGAV();
+ ReleaseId getGAV();
InternalKieModule getKieModuleForKBase(String kBaseName);
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieRepositoryImpl.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieRepositoryImpl.java
index 32635dfb4a8..1c11b1cc162 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieRepositoryImpl.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieRepositoryImpl.java
@@ -1,13 +1,12 @@
package org.kie.builder.impl;
import org.drools.io.internal.InternalResource;
-import org.drools.kproject.GAVImpl;
+import org.drools.kproject.ReleaseIdImpl;
import org.drools.kproject.models.KieModuleModelImpl;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieModule;
import org.kie.builder.KieRepository;
import org.kie.builder.KieScanner;
-import org.kie.builder.Results;
import org.kie.internal.utils.ServiceRegistryImpl;
import org.kie.io.Resource;
import org.kie.runtime.KieContainer;
@@ -42,17 +41,17 @@ public class KieRepositoryImpl
private final KieModuleRepo kieModuleRepo = new KieModuleRepo();
- private final AtomicReference<GAV> defaultGAV = new AtomicReference( new GAVImpl( DEFAULT_GROUP,
+ private final AtomicReference<ReleaseId> defaultGAV = new AtomicReference( new ReleaseIdImpl( DEFAULT_GROUP,
DEFAULT_ARTIFACT,
DEFAULT_VERSION ) );
private InternalKieScanner internalKieScanner;
- public void setDefaultGAV(GAV gav) {
- this.defaultGAV.set( gav );
+ public void setDefaultGAV(ReleaseId releaseId) {
+ this.defaultGAV.set(releaseId);
}
- public GAV getDefaultGAV() {
+ public ReleaseId getDefaultReleaseId() {
return this.defaultGAV.get();
}
@@ -61,34 +60,34 @@ public void addKieModule(KieModule kieModule) {
log.info( "KieModule was added:" + kieModule);
}
- public KieModule getKieModule(GAV gav) {
- VersionRange versionRange = new VersionRange(gav.getVersion());
+ public KieModule getKieModule(ReleaseId releaseId) {
+ VersionRange versionRange = new VersionRange(releaseId.getVersion());
- KieModule kieModule = kieModuleRepo.load(gav, versionRange);
+ KieModule kieModule = kieModuleRepo.load(releaseId, versionRange);
if ( kieModule == null ) {
- log.debug( "KieModule Lookup. GAV {} was not in cache, checking classpath",
- gav.toExternalForm() );
- kieModule = checkClasspathForKieModule(gav);
+ log.debug( "KieModule Lookup. ReleaseId {} was not in cache, checking classpath",
+ releaseId.toExternalForm() );
+ kieModule = checkClasspathForKieModule(releaseId);
}
if ( kieModule == null ) {
- log.debug( "KieModule Lookup. GAV {} was not in cache, checking maven repository",
- gav.toExternalForm() );
- kieModule = loadKieModuleFromMavenRepo(gav);
+ log.debug( "KieModule Lookup. ReleaseId {} was not in cache, checking maven repository",
+ releaseId.toExternalForm() );
+ kieModule = loadKieModuleFromMavenRepo(releaseId);
}
return kieModule;
}
- private KieModule checkClasspathForKieModule(GAV gav) {
+ private KieModule checkClasspathForKieModule(ReleaseId releaseId) {
// TODO
// ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
- // URL url = classLoader.getResource( ((GAVImpl)gav).getPomPropertiesPath() );
+ // URL url = classLoader.getResource( ((ReleaseIdImpl)releaseId).getPomPropertiesPath() );
return null;
}
- private KieModule loadKieModuleFromMavenRepo(GAV gav) {
- return getInternalKieScanner().loadArtifact( gav );
+ private KieModule loadKieModuleFromMavenRepo(ReleaseId releaseId) {
+ return getInternalKieScanner().loadArtifact(releaseId);
}
private InternalKieScanner getInternalKieScanner() {
@@ -110,7 +109,7 @@ private static class DummyKieScanner
public void setKieContainer(KieContainer kieContainer) {
}
- public KieModule loadArtifact(GAV gav) {
+ public KieModule loadArtifact(ReleaseId releaseId) {
return null;
}
@@ -166,26 +165,26 @@ private static class KieModuleRepo {
private final Map<String, TreeMap<ComparableVersion, KieModule>> kieModules = new HashMap<String, TreeMap<ComparableVersion, KieModule>>();
void store(KieModule kieModule) {
- GAV gav = kieModule.getGAV();
- String ga = gav.getGroupId() + ":" + gav.getArtifactId();
+ ReleaseId releaseId = kieModule.getReleaseId();
+ String ga = releaseId.getGroupId() + ":" + releaseId.getArtifactId();
TreeMap<ComparableVersion, KieModule> artifactMap = kieModules.get(ga);
if (artifactMap == null) {
artifactMap = new TreeMap<ComparableVersion, KieModule>();
kieModules.put(ga, artifactMap);
}
- artifactMap.put(new ComparableVersion(gav.getVersion()), kieModule);
+ artifactMap.put(new ComparableVersion(releaseId.getVersion()), kieModule);
}
- KieModule load(GAV gav, VersionRange versionRange) {
- String ga = gav.getGroupId() + ":" + gav.getArtifactId();
+ KieModule load(ReleaseId releaseId, VersionRange versionRange) {
+ String ga = releaseId.getGroupId() + ":" + releaseId.getArtifactId();
TreeMap<ComparableVersion, KieModule> artifactMap = kieModules.get(ga);
if (artifactMap == null) {
return null;
}
if (versionRange.fixed) {
- return artifactMap.get(new ComparableVersion(gav.getVersion()));
+ return artifactMap.get(new ComparableVersion(releaseId.getVersion()));
}
if (versionRange.upperBound == null) {
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java
index ae912e9daec..a5ab109b239 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java
@@ -4,11 +4,11 @@
import org.drools.command.impl.CommandFactoryServiceImpl;
import org.drools.concurrent.ExecutorProviderImpl;
import org.drools.io.impl.ResourceFactoryServiceImpl;
-import org.drools.kproject.GAVImpl;
+import org.drools.kproject.ReleaseIdImpl;
import org.drools.kproject.models.KieModuleModelImpl;
import org.drools.marshalling.impl.MarshallerProviderImpl;
import org.kie.KieServices;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBuilder;
import org.kie.builder.KieFileSystem;
import org.kie.builder.KieModuleModel;
@@ -69,10 +69,10 @@ public void nullKieClasspathContainer() {
}
}
- public KieContainer newKieContainer(GAV gav) {
- InternalKieModule kieModule = (InternalKieModule) getRepository().getKieModule(gav);
+ public KieContainer newKieContainer(ReleaseId releaseId) {
+ InternalKieModule kieModule = (InternalKieModule) getRepository().getKieModule(releaseId);
if (kieModule == null) {
- throw new RuntimeException("Cannot find KieModule: " + gav);
+ throw new RuntimeException("Cannot find KieModule: " + releaseId);
}
KieProject kProject = new KieModuleKieProject( kieModule, getRepository() );
return new KieContainerImpl( kProject, getRepository() );
@@ -122,8 +122,8 @@ public KieStoreServices getStoreServices() {
return ServiceRegistryImpl.getInstance().get( KieStoreServices.class );
}
- public GAV newGav(String groupId, String artifactId, String version) {
- return new GAVImpl(groupId, artifactId, version);
+ public ReleaseId newReleaseId(String groupId, String artifactId, String version) {
+ return new ReleaseIdImpl(groupId, artifactId, version);
}
public KieModuleModel newKieModuleModel() {
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/MemoryKieModule.java b/drools-compiler/src/main/java/org/kie/builder/impl/MemoryKieModule.java
index dd61a6d7b99..a3de9fdac1d 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/MemoryKieModule.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/MemoryKieModule.java
@@ -3,7 +3,7 @@
import org.drools.commons.jci.readers.ResourceReader;
import org.drools.compiler.io.memory.MemoryFileSystem;
import org.drools.kproject.models.KieModuleModelImpl;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieModuleModel;
import java.io.File;
@@ -13,12 +13,12 @@ public class MemoryKieModule extends AbstractKieModule implements ResourceReader
private final MemoryFileSystem mfs;
- public MemoryKieModule(GAV gav) {
- this(gav, new KieModuleModelImpl(), new MemoryFileSystem());
+ public MemoryKieModule(ReleaseId releaseId) {
+ this(releaseId, new KieModuleModelImpl(), new MemoryFileSystem());
}
- public MemoryKieModule(GAV gav, KieModuleModel kieProject, MemoryFileSystem mfs) {
- super(gav, kieProject);
+ public MemoryKieModule(ReleaseId releaseId, KieModuleModel kieProject, MemoryFileSystem mfs) {
+ super(releaseId, kieProject);
this.mfs = mfs;
}
@@ -52,6 +52,6 @@ public byte[] getBytes() {
}
public String toString() {
- return "MemoryKieModule[ GAV=" + getGAV() + "]";
+ return "MemoryKieModule[ ReleaseId=" + getReleaseId() + "]";
}
}
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/ZipKieModule.java b/drools-compiler/src/main/java/org/kie/builder/impl/ZipKieModule.java
index 056355f5b76..f3283893e62 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/ZipKieModule.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/ZipKieModule.java
@@ -2,7 +2,7 @@
import org.drools.core.util.IoUtils;
import org.drools.kproject.models.KieModuleModelImpl;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieModuleModel;
import java.io.File;
@@ -16,14 +16,14 @@ public class ZipKieModule extends AbstractKieModule implements InternalKieModule
private final File file;
private Map<String, ZipEntry> zipEntries;
- public ZipKieModule(GAV gav, File jar) {
- this(gav, getKieModuleModelFromJar(jar), jar);
+ public ZipKieModule(ReleaseId releaseId, File jar) {
+ this(releaseId, getKieModuleModelFromJar(jar), jar);
}
- public ZipKieModule(GAV gav,
+ public ZipKieModule(ReleaseId releaseId,
KieModuleModel kieProject,
File file) {
- super( gav, kieProject );
+ super(releaseId, kieProject );
this.file = file;
this.zipEntries = IoUtils.buildZipFileMapEntries( file );
}
@@ -32,7 +32,7 @@ private static KieModuleModel getKieModuleModelFromJar(File jar) {
ZipFile zipFile = null;
try {
zipFile = new ZipFile( jar );
- ZipEntry zipEntry = zipFile.getEntry( KieModuleModelImpl.KMODULE_JAR_PATH );
+ ZipEntry zipEntry = zipFile.getEntry(KieModuleModelImpl.KMODULE_JAR_PATH);
return KieModuleModelImpl.fromXML(zipFile.getInputStream(zipEntry));
} catch ( Exception e ) {
throw new RuntimeException("Unable to load kmodule.xml from " + jar.getAbsolutePath(), e);
@@ -92,6 +92,6 @@ public byte[] getBytes() {
}
public String toString() {
- return "ZipKieModule[ GAV=" + getGAV() + "file=" + file + "]";
+ return "ZipKieModule[ ReleaseId=" + getReleaseId() + "file=" + file + "]";
}
}
diff --git a/drools-compiler/src/test/java/org/drools/builder/KieBuilderTest.java b/drools-compiler/src/test/java/org/drools/builder/KieBuilderTest.java
index 84a07a57756..239d2610504 100644
--- a/drools-compiler/src/test/java/org/drools/builder/KieBuilderTest.java
+++ b/drools-compiler/src/test/java/org/drools/builder/KieBuilderTest.java
@@ -3,7 +3,7 @@
import org.drools.compiler.io.memory.MemoryFileSystem;
import org.drools.core.util.FileManager;
-import org.drools.kproject.GAVImpl;
+import org.drools.kproject.ReleaseIdImpl;
import org.drools.kproject.models.KieBaseModelImpl;
import org.junit.After;
import org.junit.Before;
@@ -11,7 +11,7 @@
import org.junit.Test;
import org.kie.KieBase;
import org.kie.KieServices;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBaseModel;
import org.kie.builder.KieBuilder;
import org.kie.builder.KieFileSystem;
@@ -54,14 +54,14 @@ public void tearDown() throws Exception {
public void testInMemory() throws ClassNotFoundException, InterruptedException, IOException {
String namespace = "org.kie.test";
- GAV gav = KieServices.Factory.get().newGav( namespace, "memory", "1.0-SNAPSHOT" );
+ ReleaseId releaseId = KieServices.Factory.get().newReleaseId(namespace, "memory", "1.0-SNAPSHOT");
KieModuleModel kProj = createKieProject(namespace);
KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
- generateAll(kfs, namespace, gav, kProj);
+ generateAll(kfs, namespace, releaseId, kProj);
- createAndTestKieContainer(gav, createKieBuilder(kfs), namespace );
+ createAndTestKieContainer(releaseId, createKieBuilder(kfs), namespace );
}
@Test
@@ -70,16 +70,16 @@ public void testOnDisc() throws ClassNotFoundException, InterruptedException, IO
KieModuleModel kProj = createKieProject(namespace);
- GAV gav = KieServices.Factory.get().newGav( namespace, "memory", "1.0-SNAPSHOT" );
+ ReleaseId releaseId = KieServices.Factory.get().newReleaseId(namespace, "memory", "1.0-SNAPSHOT");
KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
- generateAll(kfs, namespace, gav, kProj);
+ generateAll(kfs, namespace, releaseId, kProj);
MemoryFileSystem mfs = ((KieFileSystemImpl)kfs).asMemoryFileSystem();
File file = fileManager.getRootDirectory() ;
mfs.writeAsFs( file );
- createAndTestKieContainer(gav, createKieBuilder(kfs), namespace);
+ createAndTestKieContainer(releaseId, createKieBuilder(kfs), namespace);
}
@Test
@@ -87,10 +87,10 @@ public void testKieModuleDepednencies() throws ClassNotFoundException, Interrupt
KieServices ks = KieServices.Factory.get();
String namespace1 = "org.kie.test1";
- GAV gav1 = KieServices.Factory.get().newGav( namespace1, "memory", "1.0-SNAPSHOT" );
+ ReleaseId releaseId1 = KieServices.Factory.get().newReleaseId(namespace1, "memory", "1.0-SNAPSHOT");
KieModuleModel kProj1 = createKieProject(namespace1);
KieFileSystem kfs1 = KieServices.Factory.get().newKieFileSystem();
- generateAll(kfs1, namespace1, gav1, kProj1);
+ generateAll(kfs1, namespace1, releaseId1, kProj1);
KieBuilder kb1 = createKieBuilder(kfs1);
kb1.buildAll();
@@ -98,18 +98,18 @@ public void testKieModuleDepednencies() throws ClassNotFoundException, Interrupt
fail("Unable to build KieJar\n" + kb1.getResults( ).toString() );
}
KieRepository kr = ks.getRepository();
- KieModule kModule1 = kr.getKieModule(gav1);
+ KieModule kModule1 = kr.getKieModule(releaseId1);
assertNotNull( kModule1 );
String namespace2 = "org.kie.test2";
- GAV gav2 = KieServices.Factory.get().newGav( namespace2, "memory", "1.0-SNAPSHOT" );
+ ReleaseId releaseId2 = KieServices.Factory.get().newReleaseId(namespace2, "memory", "1.0-SNAPSHOT");
KieModuleModel kProj2 = createKieProject(namespace2);
KieBaseModelImpl kieBase2 = ( KieBaseModelImpl ) kProj2.getKieBaseModels().get( namespace2 );
kieBase2.addInclude( namespace1 );
KieFileSystem kfs2 = KieServices.Factory.get().newKieFileSystem();
- generateAll(kfs2, namespace2, gav2, kProj2);
+ generateAll(kfs2, namespace2, releaseId2, kProj2);
KieBuilder kb2 = createKieBuilder(kfs2);
@@ -118,10 +118,10 @@ public void testKieModuleDepednencies() throws ClassNotFoundException, Interrupt
if ( kb2.getResults().hasMessages(Level.ERROR) ) {
fail("Unable to build KieJar\n" + kb2.getResults( ).toString() );
}
- KieModule kModule2= kr.getKieModule(gav2);
+ KieModule kModule2= kr.getKieModule(releaseId2);
assertNotNull( kModule2);
- KieContainer kContainer = ks.newKieContainer(gav2);
+ KieContainer kContainer = ks.newKieContainer(releaseId2);
KieBase kBase = kContainer.getKieBase( namespace2 );
KieSession kSession = kBase.newKieSession();
@@ -144,7 +144,7 @@ public void testNoPomXml() throws ClassNotFoundException, InterruptedException,
KieModuleModel kProj = createKieProject(namespace);
- GAV gav = KieServices.Factory.get().getRepository().getDefaultGAV();
+ ReleaseId releaseId = KieServices.Factory.get().getRepository().getDefaultReleaseId();
KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
generateKProjectXML( kfs, namespace, kProj );
@@ -153,29 +153,29 @@ public void testNoPomXml() throws ClassNotFoundException, InterruptedException,
MemoryFileSystem mfs = ((KieFileSystemImpl)kfs).asMemoryFileSystem();
- createAndTestKieContainer(gav, createKieBuilder(kfs), namespace );
+ createAndTestKieContainer(releaseId, createKieBuilder(kfs), namespace );
}
@Test @Ignore
public void testNoProjectXml() throws ClassNotFoundException, InterruptedException, IOException {
String namespace = "org.kie.test";
- GAV gav = KieServices.Factory.get().newGav( namespace, "memory", "1.0-SNAPSHOT" );
+ ReleaseId releaseId = KieServices.Factory.get().newReleaseId(namespace, "memory", "1.0-SNAPSHOT");
KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
- generatePomXML(kfs, gav);
+ generatePomXML(kfs, releaseId);
generateMessageClass( kfs, namespace );
generateRule( kfs, namespace );
MemoryFileSystem mfs = ((KieFileSystemImpl)kfs).asMemoryFileSystem();
- createAndTestKieContainer(gav, createKieBuilder(kfs), null );
+ createAndTestKieContainer(releaseId, createKieBuilder(kfs), null );
}
public void testNoPomAndProjectXml() throws ClassNotFoundException, InterruptedException, IOException {
String namespace = "org.kie.test";
- GAV gav = KieServices.Factory.get().getRepository().getDefaultGAV();
+ ReleaseId releaseId = KieServices.Factory.get().getRepository().getDefaultReleaseId();
KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
generateMessageClass( kfs, namespace );
@@ -183,7 +183,7 @@ public void testNoPomAndProjectXml() throws ClassNotFoundException, InterruptedE
MemoryFileSystem mfs = ((KieFileSystemImpl)kfs).asMemoryFileSystem();
- createAndTestKieContainer(gav, createKieBuilder(kfs), null );
+ createAndTestKieContainer(releaseId, createKieBuilder(kfs), null );
}
@Test
@@ -192,10 +192,10 @@ public void testInvalidPomXmlGAV() throws ClassNotFoundException, InterruptedExc
KieModuleModel kProj = createKieProject(namespace);
- GAV gav = new GAVImpl( "", "", "" );
+ ReleaseId releaseId = new ReleaseIdImpl( "", "", "" );
KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
- generatePomXML(kfs, gav);
+ generatePomXML(kfs, releaseId);
generateMessageClass( kfs, namespace );
generateRule( kfs, namespace );
@@ -213,7 +213,7 @@ public void testInvalidPomXmlContent() throws ClassNotFoundException, Interrupte
KieModuleModel kProj = createKieProject(namespace);
- GAV gav = KieServices.Factory.get().newGav( namespace, "memory", "1.0-SNAPSHOT" );
+ ReleaseId releaseId = KieServices.Factory.get().newReleaseId(namespace, "memory", "1.0-SNAPSHOT");
KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
kfs.write( "pom.xml", "xxxx" );
@@ -232,10 +232,10 @@ public void testInvalidProjectXml() throws ClassNotFoundException, InterruptedEx
KieModuleModel kProj = createKieProject(namespace);
- GAV gav = KieServices.Factory.get().newGav( namespace, "memory", "1.0-SNAPSHOT" );
+ ReleaseId releaseId = KieServices.Factory.get().newReleaseId(namespace, "memory", "1.0-SNAPSHOT");
KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
- generatePomXML(kfs, gav);
+ generatePomXML(kfs, releaseId);
kfs.writeKModuleXML("xxxx" );
generateMessageClass( kfs, namespace );
generateRule( kfs, namespace );
@@ -257,16 +257,16 @@ public KieModuleModel createKieProject(String namespace) {
return kProj;
}
- public void generateAll(KieFileSystem kfs, String namespace, GAV gav, KieModuleModel kProj) {
- generatePomXML(kfs, gav);
+ public void generateAll(KieFileSystem kfs, String namespace, ReleaseId releaseId, KieModuleModel kProj) {
+ generatePomXML(kfs, releaseId);
generateKProjectXML( kfs, namespace, kProj );
generateMessageClass( kfs, namespace );
generateRule( kfs, namespace );
}
- public void generatePomXML(KieFileSystem kfs, GAV gav) {
- kfs.writePomXML( KieBuilderImpl.generatePomXml( gav ) );
+ public void generatePomXML(KieFileSystem kfs, ReleaseId releaseId) {
+ kfs.writePomXML( KieBuilderImpl.generatePomXml(releaseId) );
}
public void generateKProjectXML(KieFileSystem kfs, String namespace, KieModuleModel kProj) {
@@ -291,7 +291,7 @@ public KieBuilder createKieBuilder(File file) {
return ks.newKieBuilder( file );
}
- public void createAndTestKieContainer(GAV gav, KieBuilder kb, String kBaseName) throws IOException,
+ public void createAndTestKieContainer(ReleaseId releaseId, KieBuilder kb, String kBaseName) throws IOException,
ClassNotFoundException,
InterruptedException {
KieServices ks = KieServices.Factory.get();
@@ -302,10 +302,10 @@ public void createAndTestKieContainer(GAV gav, KieBuilder kb, String kBaseName)
fail("Unable to build KieModule\n" + kb.getResults( ).toString() );
}
KieRepository kr = ks.getRepository();
- KieModule kJar = kr.getKieModule(gav);
+ KieModule kJar = kr.getKieModule(releaseId);
assertNotNull( kJar );
- KieContainer kContainer = ks.newKieContainer(gav);
+ KieContainer kContainer = ks.newKieContainer(releaseId);
KieBase kBase = kBaseName != null ? kContainer.getKieBase( kBaseName ) : kContainer.getKieBase();
KieSession kSession = kBase.newKieSession();
diff --git a/drools-compiler/src/test/java/org/drools/cdi/CDIGAVTest.java b/drools-compiler/src/test/java/org/drools/cdi/CDIGAVTest.java
index c3c4c1914cf..371b234cb6d 100644
--- a/drools-compiler/src/test/java/org/drools/cdi/CDIGAVTest.java
+++ b/drools-compiler/src/test/java/org/drools/cdi/CDIGAVTest.java
@@ -1,47 +1,7 @@
package org.drools.cdi;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-
-import javax.enterprise.context.ApplicationScoped;
-import javax.enterprise.context.SessionScoped;
-import javax.enterprise.inject.spi.Bean;
-import javax.enterprise.inject.spi.BeanManager;
-import javax.enterprise.util.AnnotationLiteral;
-import javax.inject.Inject;
-
-import org.drools.cdi.example.CDIExamplesTest;
-import org.drools.cdi.example.Message;
-import org.drools.cdi.example.Message2;
-import org.drools.cdi.example.Message2Impl1;
-import org.drools.cdi.example.Message2Impl2;
-import org.drools.cdi.example.MessageImpl;
-import org.drools.cdi.example.MessageProducers;
-import org.drools.cdi.example.MessageProducers2;
-import org.drools.cdi.example.Msg;
-import org.drools.cdi.example.Msg1;
-import org.drools.cdi.example.Msg2;
-import org.drools.kproject.AbstractKnowledgeTest;
-import org.drools.kproject.KPTest;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
import org.junit.Ignore;
-import org.junit.Test;
import org.junit.runner.RunWith;
-import org.kie.KieBase;
-import org.kie.KieServices;
-import org.kie.KnowledgeBase;
-import org.kie.builder.KieRepository;
-import org.kie.cdi.KBase;
-import org.kie.cdi.KGAV;
-import org.kie.cdi.KSession;
-import org.kie.command.KieCommands;
-import org.kie.runtime.KieSession;
-import org.kie.runtime.StatelessKieSession;
-
-import static org.junit.Assert.*;
@RunWith(CDITestRunner.class)
@Ignore
@@ -49,37 +9,37 @@ public class CDIGAVTest {
// public static AbstractKnowledgeTest helper;
//
// @Inject
-// @KBase("jar1.KBase1") @KGAV(groupId = "jar1",
+// @KBase("jar1.KBase1") @KReleaseId(groupId = "jar1",
// artifactId = "art1",
// version = "1.0")
// private KieBase jar1KBase1v10;
//
// @Inject
-// @KBase("jar1.KBase1") @KGAV(groupId = "jar1",
+// @KBase("jar1.KBase1") @KReleaseId(groupId = "jar1",
// artifactId = "art1",
// version = "1.1")
// private KieBase jar1KBase1v11;
//
// @Inject
-// @KSession("jar1.KSession1") @KGAV( groupId = "jar1",
+// @KSession("jar1.KSession1") @KReleaseId( groupId = "jar1",
// artifactId = "art1",
// version = "1.0" )
// private StatelessKieSession kbase1ksession1v10;
//
// @Inject
-// @KSession("jar1.KSession1") @KGAV( groupId = "jar1",
+// @KSession("jar1.KSession1") @KReleaseId( groupId = "jar1",
// artifactId = "art1",
// version = "1.1" )
// private StatelessKieSession kbase1ksession1v11;
//
// @Inject
-// @KSession("jar1.KSession2") @KGAV( groupId = "jar1",
+// @KSession("jar1.KSession2") @KReleaseId( groupId = "jar1",
// artifactId = "art1",
// version = "1.0" )
// private KieSession kbase1ksession2v10;
//
// @Inject
-// @KSession("jar1.KSession2") @KGAV( groupId = "jar1",
+// @KSession("jar1.KSession2") @KReleaseId( groupId = "jar1",
// artifactId = "art1",
// version = "1.1" )
// private KieSession kbase1ksession2v11;
diff --git a/drools-compiler/src/test/java/org/drools/cdi/CDINamedTest.java b/drools-compiler/src/test/java/org/drools/cdi/CDINamedTest.java
index 4ed945221c0..ad7814c5a11 100644
--- a/drools-compiler/src/test/java/org/drools/cdi/CDINamedTest.java
+++ b/drools-compiler/src/test/java/org/drools/cdi/CDINamedTest.java
@@ -1,48 +1,7 @@
package org.drools.cdi;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-
-import javax.enterprise.context.ApplicationScoped;
-import javax.enterprise.context.SessionScoped;
-import javax.enterprise.inject.spi.Bean;
-import javax.enterprise.inject.spi.BeanManager;
-import javax.enterprise.util.AnnotationLiteral;
-import javax.inject.Inject;
-import javax.inject.Named;
-
-import org.drools.cdi.example.CDIExamplesTest;
-import org.drools.cdi.example.Message;
-import org.drools.cdi.example.Message2;
-import org.drools.cdi.example.Message2Impl1;
-import org.drools.cdi.example.Message2Impl2;
-import org.drools.cdi.example.MessageImpl;
-import org.drools.cdi.example.MessageProducers;
-import org.drools.cdi.example.MessageProducers2;
-import org.drools.cdi.example.Msg;
-import org.drools.cdi.example.Msg1;
-import org.drools.cdi.example.Msg2;
-import org.drools.kproject.AbstractKnowledgeTest;
-import org.drools.kproject.KPTest;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
import org.junit.Ignore;
-import org.junit.Test;
import org.junit.runner.RunWith;
-import org.kie.KieBase;
-import org.kie.KieServices;
-import org.kie.KnowledgeBase;
-import org.kie.builder.KieRepository;
-import org.kie.cdi.KBase;
-import org.kie.cdi.KGAV;
-import org.kie.cdi.KSession;
-import org.kie.command.KieCommands;
-import org.kie.runtime.KieSession;
-import org.kie.runtime.StatelessKieSession;
-
-import static org.junit.Assert.*;
@RunWith(CDITestRunner.class)
@Ignore
@@ -52,7 +11,7 @@ public class CDINamedTest {
// @Inject
// @KBase("jar1.KBase1")
// @Named("kb1")
-// @KGAV(groupId = "jar1",
+// @KReleaseId(groupId = "jar1",
// artifactId = "art1",
// version = "1.0")
// private KieBase jar1KBase1kb1;
@@ -60,7 +19,7 @@ public class CDINamedTest {
// @Inject
// @KBase("jar1.KBase1")
// @Named("kb2")
-// @KGAV(groupId = "jar1",
+// @KReleaseId(groupId = "jar1",
// artifactId = "art1",
// version = "1.0")
// private KieBase jar1KBase1kb2;
@@ -68,14 +27,14 @@ public class CDINamedTest {
// @Inject
// @KBase("jar1.KBase1")
// @Named("kb2")
-// @KGAV(groupId = "jar1",
+// @KReleaseId(groupId = "jar1",
// artifactId = "art1",
// version = "1.0")
// private KieBase jar1KBase1kb22;
//
// @Inject
// @KSession("jar1.KSession1")
-// @KGAV( groupId = "jar1",
+// @KReleaseId( groupId = "jar1",
// artifactId = "art1",
// version = "1.0" )
// @Named("sks1")
@@ -83,7 +42,7 @@ public class CDINamedTest {
//
// @Inject
// @KSession("jar1.KSession1")
-// @KGAV( groupId = "jar1",
+// @KReleaseId( groupId = "jar1",
// artifactId = "art1",
// version = "1.0" )
// @Named("sks2")
@@ -91,7 +50,7 @@ public class CDINamedTest {
//
// @Inject
// @KSession("jar1.KSession1")
-// @KGAV( groupId = "jar1",
+// @KReleaseId( groupId = "jar1",
// artifactId = "art1",
// version = "1.0" )
// @Named("sks2")
@@ -99,7 +58,7 @@ public class CDINamedTest {
//
// @Inject
// @KSession("jar1.KSession2")
-// @KGAV( groupId = "jar1",
+// @KReleaseId( groupId = "jar1",
// artifactId = "art1",
// version = "1.0" )
// @Named("ks1")
@@ -107,7 +66,7 @@ public class CDINamedTest {
//
// @Inject
// @KSession("jar1.KSession2")
-// @KGAV( groupId = "jar1",
+// @KReleaseId( groupId = "jar1",
// artifactId = "art1",
// version = "1.0" )
// @Named("ks2")
@@ -115,7 +74,7 @@ public class CDINamedTest {
//
// @Inject
// @KSession("jar1.KSession2")
-// @KGAV( groupId = "jar1",
+// @KReleaseId( groupId = "jar1",
// artifactId = "art1",
// version = "1.0" )
// @Named("ks2")
diff --git a/drools-compiler/src/test/java/org/drools/cdi/CDITestRunner.java b/drools-compiler/src/test/java/org/drools/cdi/CDITestRunner.java
index 0d96f89a69e..078750b094b 100644
--- a/drools-compiler/src/test/java/org/drools/cdi/CDITestRunner.java
+++ b/drools-compiler/src/test/java/org/drools/cdi/CDITestRunner.java
@@ -23,7 +23,7 @@
import org.kie.builder.impl.KieRepositoryImpl;
import org.kie.builder.impl.KieServicesImpl;
import org.kie.cdi.KBase;
-import org.kie.cdi.KGAV;
+import org.kie.cdi.KReleaseId;
import org.kie.cdi.KSession;
import org.kie.command.KieCommands;
import org.kie.io.KieResources;
@@ -49,7 +49,7 @@ public static Weld createWeld(String... classes) {
list.add( KieCDIExtension.class.getName() );
list.add( KBase.class.getName() );
list.add( KSession.class.getName() );
- list.add( KGAV.class.getName() );
+ list.add( KReleaseId.class.getName() );
list.add( KieServices.class.getName() );
list.add( KieServicesImpl.class.getName() );
list.add( KieRepository.class.getName() );
diff --git a/drools-compiler/src/test/java/org/drools/cdi/KieContainerInjectionTest.java b/drools-compiler/src/test/java/org/drools/cdi/KieContainerInjectionTest.java
index 56539ca39b8..2c7bc15fd8f 100644
--- a/drools-compiler/src/test/java/org/drools/cdi/KieContainerInjectionTest.java
+++ b/drools-compiler/src/test/java/org/drools/cdi/KieContainerInjectionTest.java
@@ -1,14 +1,10 @@
package org.drools.cdi;
-import javax.inject.Inject;
-
import org.junit.Ignore;
-import org.kie.cdi.KGAV;
-import org.kie.runtime.KieContainer;
@Ignore
public class KieContainerInjectionTest {
//
-// //@Inject @KGAV(groupId="org.drools", artifactId="drools-core")
+// //@Inject @KReleaseId(groupId="org.drools", artifactId="drools-core")
// KieContainer kr2;
}
diff --git a/drools-compiler/src/test/java/org/drools/cdi/KieServicesInjectionTest.java b/drools-compiler/src/test/java/org/drools/cdi/KieServicesInjectionTest.java
index 03f6884af18..fbc3f2b0b9f 100644
--- a/drools-compiler/src/test/java/org/drools/cdi/KieServicesInjectionTest.java
+++ b/drools-compiler/src/test/java/org/drools/cdi/KieServicesInjectionTest.java
@@ -2,22 +2,14 @@
import javax.inject.Inject;
-import org.drools.kproject.AbstractKnowledgeTest;
import org.junit.AfterClass;
import org.junit.BeforeClass;
-import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
-import org.kie.KieBase;
import org.kie.KieServices;
import org.kie.builder.KieRepository;
-import org.kie.cdi.KBase;
-import org.kie.cdi.KGAV;
-import org.kie.cdi.KSession;
import org.kie.command.KieCommands;
import org.kie.io.KieResources;
-import org.kie.runtime.KieContainer;
-import org.kie.runtime.KieSession;
import static org.junit.Assert.*;
@@ -64,7 +56,7 @@ public void testKieServicesInjection() {
@Test
public void testKieRepositoryInjection() {
assertNotNull( kr );
- assertNotNull( kr.getDefaultGAV() );
+ assertNotNull( kr.getDefaultReleaseId() );
}
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java
index 703c304d82a..141d275a054 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java
@@ -4,7 +4,7 @@
import org.drools.Message;
import org.junit.Test;
import org.kie.KieServices;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBaseModel;
import org.kie.builder.KieBuilder;
import org.kie.builder.KieFileSystem;
@@ -34,7 +34,7 @@ public void testHelloWorld() throws Exception {
KieFileSystem kfs = ks.newKieFileSystem().write( "src/main/resources/r1.drl", drl );
ks.newKieBuilder( kfs ).buildAll();
- KieSession ksession = ks.newKieContainer(ks.getRepository().getDefaultGAV()).newKieSession();
+ KieSession ksession = ks.newKieContainer(ks.getRepository().getDefaultReleaseId()).newKieSession();
ksession.insert(new Message("Hello World"));
int count = ksession.fireAllRules();
@@ -75,16 +75,16 @@ public void testHelloWorldWithPackages() throws Exception {
KieServices ks = KieServices.Factory.get();
- GAV gav = ks.newGav("org.kie", "hello-world", "1.0-SNAPSHOT");
+ ReleaseId releaseId = ks.newReleaseId("org.kie", "hello-world", "1.0-SNAPSHOT");
KieFileSystem kfs = ks.newKieFileSystem()
- .generateAndWritePomXML( gav )
+ .generateAndWritePomXML(releaseId)
.write("src/main/resources/KBase1/org/pkg1/r1.drl", drl1)
.write("src/main/resources/KBase1/org/pkg2/r2.drl", drl2)
.writeKModuleXML(createKieProjectWithPackages(ks, "org.pkg1").toXML());
ks.newKieBuilder( kfs ).buildAll();
- KieSession ksession = ks.newKieContainer(gav).newKieSession("KSession1");
+ KieSession ksession = ks.newKieContainer(releaseId).newKieSession("KSession1");
ksession.insert(new Message("Hello World"));
int count = ksession.fireAllRules();
@@ -108,16 +108,16 @@ public void testHelloWorldWithWildcardPackages() throws Exception {
KieServices ks = KieServices.Factory.get();
- GAV gav = ks.newGav("org.kie", "hello-world", "1.0-SNAPSHOT");
+ ReleaseId releaseId = ks.newReleaseId("org.kie", "hello-world", "1.0-SNAPSHOT");
KieFileSystem kfs = ks.newKieFileSystem()
- .generateAndWritePomXML( gav )
+ .generateAndWritePomXML(releaseId)
.write("src/main/resources/KBase1/org/pkg1/test/r1.drl", drl1)
.write("src/main/resources/KBase1/org/pkg2/test/r2.drl", drl2)
.writeKModuleXML( createKieProjectWithPackages(ks, "org.pkg1.*").toXML());
ks.newKieBuilder( kfs ).buildAll();
- KieSession ksession = ks.newKieContainer(gav).newKieSession("KSession1");
+ KieSession ksession = ks.newKieContainer(releaseId).newKieSession("KSession1");
ksession.insert(new Message("Hello World"));
int count = ksession.fireAllRules();
@@ -148,33 +148,33 @@ public void testHelloWorldOnVersionRange() throws Exception {
buildVersion(ks, "Aloha Earth", "1.1");
buildVersion(ks, "Hi Universe", "1.2");
- GAV latestGav = ks.newGav("org.kie", "hello-world", "LATEST");
+ ReleaseId latestReleaseId = ks.newReleaseId("org.kie", "hello-world", "LATEST");
- KieSession ksession = ks.newKieContainer(latestGav).newKieSession("KSession1");
+ KieSession ksession = ks.newKieContainer(latestReleaseId).newKieSession("KSession1");
ksession.insert(new Message("Hello World"));
assertEquals( 0, ksession.fireAllRules() );
- ksession = ks.newKieContainer(latestGav).newKieSession("KSession1");
+ ksession = ks.newKieContainer(latestReleaseId).newKieSession("KSession1");
ksession.insert(new Message("Hi Universe"));
assertEquals( 1, ksession.fireAllRules() );
- GAV gav1 = ks.newGav("org.kie", "hello-world", "1.0");
+ ReleaseId releaseId1 = ks.newReleaseId("org.kie", "hello-world", "1.0");
- ksession = ks.newKieContainer(gav1).newKieSession("KSession1");
+ ksession = ks.newKieContainer(releaseId1).newKieSession("KSession1");
ksession.insert(new Message("Hello World"));
assertEquals( 1, ksession.fireAllRules() );
- ksession = ks.newKieContainer(gav1).newKieSession("KSession1");
+ ksession = ks.newKieContainer(releaseId1).newKieSession("KSession1");
ksession.insert(new Message("Hi Universe"));
assertEquals( 0, ksession.fireAllRules() );
- GAV gav2 = ks.newGav("org.kie", "hello-world", "[1.0,1.2)");
+ ReleaseId releaseId2 = ks.newReleaseId("org.kie", "hello-world", "[1.0,1.2)");
- ksession = ks.newKieContainer(gav2).newKieSession("KSession1");
+ ksession = ks.newKieContainer(releaseId2).newKieSession("KSession1");
ksession.insert(new Message("Aloha Earth"));
assertEquals( 1, ksession.fireAllRules() );
- ksession = ks.newKieContainer(gav2).newKieSession("KSession1");
+ ksession = ks.newKieContainer(releaseId2).newKieSession("KSession1");
ksession.insert(new Message("Hi Universe"));
assertEquals( 0, ksession.fireAllRules() );
}
@@ -186,10 +186,10 @@ private void buildVersion(KieServices ks, String message, String version) {
"then\n" +
"end\n";
- GAV gav = ks.newGav("org.kie", "hello-world", version);
+ ReleaseId releaseId = ks.newReleaseId("org.kie", "hello-world", version);
KieFileSystem kfs = ks.newKieFileSystem()
- .generateAndWritePomXML( gav )
+ .generateAndWritePomXML(releaseId)
.write("src/main/resources/KBase1/org/pkg1/r1.drl", drl)
.writeKModuleXML(createKieProjectWithPackages(ks, "*").toXML());
ks.newKieBuilder( kfs ).buildAll();
@@ -219,36 +219,36 @@ public void testHelloWorldWithPackagesAnd2KieBases() throws Exception {
KieServices ks = KieServices.Factory.get();
- GAV gav = ks.newGav("org.kie", "hello-world", "1.0-SNAPSHOT");
+ ReleaseId releaseId = ks.newReleaseId("org.kie", "hello-world", "1.0-SNAPSHOT");
KieFileSystem kfs = ks.newKieFileSystem()
- .generateAndWritePomXML( gav )
+ .generateAndWritePomXML(releaseId)
.write("src/main/resources/KBase1/org/pkg1/r1.drl", drl1)
.write("src/main/resources/KBase1/org/pkg2/r2.drl", drl2)
.writeKModuleXML(createKieProjectWithPackagesAnd2KieBases(ks).toXML());
ks.newKieBuilder( kfs ).buildAll();
- KieSession ksession = ks.newKieContainer(gav).newKieSession("KSession1");
+ KieSession ksession = ks.newKieContainer(releaseId).newKieSession("KSession1");
ksession.insert(new Message("Hello World"));
assertEquals( 1, ksession.fireAllRules() );
- ksession = ks.newKieContainer(gav).newKieSession("KSession1");
+ ksession = ks.newKieContainer(releaseId).newKieSession("KSession1");
ksession.insert(new Message("Hi Universe"));
assertEquals( 1, ksession.fireAllRules() );
- ksession = ks.newKieContainer(gav).newKieSession("KSession1");
+ ksession = ks.newKieContainer(releaseId).newKieSession("KSession1");
ksession.insert(new Message("Aloha Earth"));
assertEquals( 0, ksession.fireAllRules() );
- ksession = ks.newKieContainer(gav).newKieSession("KSession2");
+ ksession = ks.newKieContainer(releaseId).newKieSession("KSession2");
ksession.insert(new Message("Hello World"));
assertEquals( 1, ksession.fireAllRules() );
- ksession = ks.newKieContainer(gav).newKieSession("KSession2");
+ ksession = ks.newKieContainer(releaseId).newKieSession("KSession2");
ksession.insert(new Message("Hi Universe"));
assertEquals( 0, ksession.fireAllRules() );
- ksession = ks.newKieContainer(gav).newKieSession("KSession2");
+ ksession = ks.newKieContainer(releaseId).newKieSession("KSession2");
ksession.insert(new Message("Aloha Earth"));
assertEquals(1, ksession.fireAllRules());
}
diff --git a/drools-compiler/src/test/java/org/drools/kproject/AbstractKnowledgeTest.java b/drools-compiler/src/test/java/org/drools/kproject/AbstractKnowledgeTest.java
index 8cf072d27b9..b672d8a2421 100644
--- a/drools-compiler/src/test/java/org/drools/kproject/AbstractKnowledgeTest.java
+++ b/drools-compiler/src/test/java/org/drools/kproject/AbstractKnowledgeTest.java
@@ -12,7 +12,7 @@
import org.drools.kproject.models.KieModuleModelImpl;
import org.junit.After;
import org.junit.Before;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBaseModel;
import org.kie.builder.KieBuilder;
import org.kie.builder.KieModuleModel;
@@ -146,8 +146,8 @@ public KieModuleModel createKieModule(String namespace,
kfs.write( "src/main/resources/META-INF/beans.xml", generateBeansXML( kproj ) );
kfs.writeKModuleXML( ((KieModuleModelImpl)kproj).toXML() );
- GAV gav = ks.newGav( namespace, "art1", version );
- kfs.generateAndWritePomXML( gav );
+ ReleaseId releaseId = ks.newReleaseId(namespace, "art1", version);
+ kfs.generateAndWritePomXML(releaseId);
String kBase1R1 = getRule( namespace + ".test1", "rule1", version );
String kBase1R2 = getRule( namespace + ".test1", "rule2", version );
@@ -169,7 +169,7 @@ public KieModuleModel createKieModule(String namespace,
KieBuilder kBuilder = ks.newKieBuilder( kfs );
kBuilder.buildAll();
- if ( kBuilder.getResults().hasMessages( Level.ERROR ) ) {
+ if ( kBuilder.getResults().hasMessages(Level.ERROR) ) {
fail( "should not have errors" + kBuilder.getResults() );
}
MemoryKieModule kieModule = ( MemoryKieModule ) kBuilder.getKieModule();
diff --git a/drools-compiler/src/test/java/org/drools/kproject/KieProjectCDITest.java b/drools-compiler/src/test/java/org/drools/kproject/KieProjectCDITest.java
index 4d1925bcaed..7ea75a821d1 100644
--- a/drools-compiler/src/test/java/org/drools/kproject/KieProjectCDITest.java
+++ b/drools-compiler/src/test/java/org/drools/kproject/KieProjectCDITest.java
@@ -1,23 +1,13 @@
package org.drools.kproject;
import org.drools.cdi.CDITestRunner;
-import org.drools.cdi.KieCDIExtension;
-import org.drools.cdi.CDITestRunner.TestWeldSEDeployment;
import org.drools.kproject.models.KieModuleModelImpl;
import org.drools.rule.JavaDialectRuntimeData;
-import org.jboss.weld.bootstrap.api.Bootstrap;
-import org.jboss.weld.bootstrap.spi.Deployment;
import org.jboss.weld.environment.se.Weld;
import org.jboss.weld.environment.se.WeldContainer;
-import org.jboss.weld.resources.spi.ResourceLoader;
-import org.junit.AfterClass;
import org.junit.Test;
import org.kie.KieServices;
-import org.kie.builder.impl.AbstractKieModule;
import org.kie.builder.impl.KieServicesImpl;
-import org.kie.cdi.KBase;
-import org.kie.cdi.KGAV;
-import org.kie.cdi.KSession;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.util.AnnotationLiteral;
@@ -27,9 +17,7 @@
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.ProtectionDomain;
-import java.util.ArrayList;
import java.util.Enumeration;
-import java.util.List;
import java.util.Set;
import static org.junit.Assert.assertNotNull;
diff --git a/drools-compiler/src/test/java/org/drools/kproject/KieProjectRuntimeModulesTest.java b/drools-compiler/src/test/java/org/drools/kproject/KieProjectRuntimeModulesTest.java
index 7a4f10f2ab5..1a3cd0e8602 100644
--- a/drools-compiler/src/test/java/org/drools/kproject/KieProjectRuntimeModulesTest.java
+++ b/drools-compiler/src/test/java/org/drools/kproject/KieProjectRuntimeModulesTest.java
@@ -5,7 +5,7 @@
import org.junit.Test;
import org.kie.KieBase;
import org.kie.KieServices;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieModuleModel;
import org.kie.builder.impl.FileKieModule;
import org.kie.builder.impl.KieContainerImpl;
@@ -28,34 +28,34 @@ public void createMultpleJarAndFileResources() throws IOException,
KieModuleModel kProjModel3 = createKieModule( "jar3", true );
KieModuleModel kProjModel4 = createKieModule( "fol4", false );
- GAV gav1 = KieServices.Factory.get().newGav( "jar1",
- "art1",
- "1.0-SNAPSHOT" );
- GAV gav2 = KieServices.Factory.get().newGav( "jar2",
- "art1",
- "1.0-SNAPSHOT" );
- GAV gav3 = KieServices.Factory.get().newGav( "jar3",
- "art1",
- "1.0-SNAPSHOT" );
- GAV gav4 = KieServices.Factory.get().newGav( "fol4",
- "art1",
- "1.0-SNAPSHOT" );
+ ReleaseId releaseId1 = KieServices.Factory.get().newReleaseId("jar1",
+ "art1",
+ "1.0-SNAPSHOT");
+ ReleaseId releaseId2 = KieServices.Factory.get().newReleaseId("jar2",
+ "art1",
+ "1.0-SNAPSHOT");
+ ReleaseId releaseId3 = KieServices.Factory.get().newReleaseId("jar3",
+ "art1",
+ "1.0-SNAPSHOT");
+ ReleaseId releaseId4 = KieServices.Factory.get().newReleaseId("fol4",
+ "art1",
+ "1.0-SNAPSHOT");
java.io.File file1 = fileManager.newFile( "jar1.jar" );
java.io.File file2 = fileManager.newFile( "jar2.jar" );
java.io.File file3 = fileManager.newFile( "jar3.jar" );
java.io.File fol4 = fileManager.newFile( "fol4" );
- ZipKieModule mod1 = new ZipKieModule( gav1,
+ ZipKieModule mod1 = new ZipKieModule(releaseId1,
kProjModel1,
file1 );
- ZipKieModule mod2 = new ZipKieModule( gav2,
+ ZipKieModule mod2 = new ZipKieModule(releaseId2,
kProjModel2,
file2 );
- ZipKieModule mod3 = new ZipKieModule( gav3,
+ ZipKieModule mod3 = new ZipKieModule(releaseId3,
kProjModel3,
file3 );
- FileKieModule mod4 = new FileKieModule( gav4,
+ FileKieModule mod4 = new FileKieModule(releaseId4,
kProjModel4,
fol4 );
diff --git a/drools-compiler/src/test/java/org/kie/builder/WireListenerTest.java b/drools-compiler/src/test/java/org/kie/builder/WireListenerTest.java
index 91e21c695fb..32d99e9ab43 100644
--- a/drools-compiler/src/test/java/org/kie/builder/WireListenerTest.java
+++ b/drools-compiler/src/test/java/org/kie/builder/WireListenerTest.java
@@ -27,9 +27,9 @@ public class WireListenerTest {
public void testWireListener() throws Exception {
KieServices ks = KieServices.Factory.get();
- GAV gav = ks.newGav("org.kie", "listener-test", "1.0-SNAPSHOT");
- build(ks, gav);
- KieContainer kieContainer = ks.newKieContainer(gav);
+ ReleaseId releaseId = ks.newReleaseId("org.kie", "listener-test", "1.0-SNAPSHOT");
+ build(ks, releaseId);
+ KieContainer kieContainer = ks.newKieContainer(releaseId);
KieSession ksession = kieContainer.newKieSession();
ksession.fireAllRules();
@@ -39,7 +39,7 @@ public void testWireListener() throws Exception {
assertEquals(1, retractEvents.size());
}
- private void build(KieServices ks, GAV gav) throws IOException {
+ private void build(KieServices ks, ReleaseId releaseId) throws IOException {
KieModuleModel kproj = ks.newKieModuleModel();
KieSessionModel ksession1 = kproj.newKieBaseModel("KBase1").newKieSessionModel("KSession1").setDefault(true);
@@ -48,7 +48,7 @@ private void build(KieServices ks, GAV gav) throws IOException {
KieFileSystem kfs = ks.newKieFileSystem();
kfs.writeKModuleXML(kproj.toXML())
- .writePomXML( generatePomXml(gav) )
+ .writePomXML( generatePomXml(releaseId) )
.write("src/main/resources/KBase1/rules.drl", createDRL());
KieBuilder kieBuilder = ks.newKieBuilder(kfs);
diff --git a/drools-compiler/src/test/java/org/kie/util/ChangeSetBuilderTest.java b/drools-compiler/src/test/java/org/kie/util/ChangeSetBuilderTest.java
index 908e6185596..f3630658f8a 100644
--- a/drools-compiler/src/test/java/org/kie/util/ChangeSetBuilderTest.java
+++ b/drools-compiler/src/test/java/org/kie/util/ChangeSetBuilderTest.java
@@ -3,7 +3,7 @@
import org.drools.kproject.models.KieModuleModelImpl;
import org.junit.Test;
import org.kie.KieServices;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBaseModel;
import org.kie.builder.KieModuleModel;
import org.kie.builder.KieSessionModel;
@@ -196,7 +196,7 @@ public void testModified2() {
private InternalKieModule createKieJar( String... drls) {
InternalKieModule kieJar = mock( InternalKieModule.class );
KieServices ks = KieServices.Factory.get();
- GAV gav = ks.newGav("org.kie", "hello-world", "1.0-SNAPSHOT");
+ ReleaseId releaseId = ks.newReleaseId("org.kie", "hello-world", "1.0-SNAPSHOT");
List<String> drlFs = new ArrayList<String>();
@@ -207,12 +207,12 @@ private InternalKieModule createKieJar( String... drls) {
when( kieJar.getBytes( fileName ) ).thenReturn( drls[i].getBytes() );
}
}
- when( kieJar.getBytes( KieModuleModelImpl.KMODULE_JAR_PATH ) ).thenReturn( createKieProjectWithPackages(ks, gav).toXML().getBytes() );
+ when( kieJar.getBytes( KieModuleModelImpl.KMODULE_JAR_PATH ) ).thenReturn( createKieProjectWithPackages(ks, releaseId).toXML().getBytes() );
when( kieJar.getFileNames() ).thenReturn( drlFs );
return ( InternalKieModule ) kieJar;
}
- private KieModuleModel createKieProjectWithPackages(KieServices ks, GAV gav) {
+ private KieModuleModel createKieProjectWithPackages(KieServices ks, ReleaseId releaseId) {
KieModuleModel kproj = ks.newKieModuleModel();
KieBaseModel kieBaseModel1 = kproj.newKieBaseModel("KBase1")
diff --git a/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetIntegrationExampleTest.java b/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetIntegrationExampleTest.java
index 947275ef269..0953eba67ba 100644
--- a/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetIntegrationExampleTest.java
+++ b/drools-decisiontables/src/test/java/org/drools/decisiontable/SpreadsheetIntegrationExampleTest.java
@@ -67,7 +67,7 @@ private KieSession getKieSession(Resource dt) {
assertTrue( kb.getResults().getMessages().isEmpty() );
// get the session
- KieSession ksession = ks.newKieContainer(ks.getRepository().getDefaultGAV()).newKieSession();
+ KieSession ksession = ks.newKieContainer(ks.getRepository().getDefaultReleaseId()).newKieSession();
return ksession;
}
diff --git a/drools-maven-plugin/src/main/java/org/drools/BuildMojo.java b/drools-maven-plugin/src/main/java/org/drools/BuildMojo.java
index e16977ecda5..8a0875eec7c 100644
--- a/drools-maven-plugin/src/main/java/org/drools/BuildMojo.java
+++ b/drools-maven-plugin/src/main/java/org/drools/BuildMojo.java
@@ -58,7 +58,7 @@ public void execute() throws MojoExecutionException, MojoFailureException {
try {
KieRepository kr = ks.getRepository();
KieModule kModule = kr.addKieModule( ks.getResources().newFileSystemResource( sourceFolder ) );
- KieContainerImpl kContainer = (KieContainerImpl)ks.newKieContainer(kModule.getGAV());
+ KieContainerImpl kContainer = (KieContainerImpl)ks.newKieContainer(kModule.getReleaseId());
KieProject kieProject = kContainer.getKieProject();
ResultsImpl messages = kieProject.verify();
diff --git a/drools-persistence-jpa/src/test/java/org/kie/persistence/session/JpaPersistentStatefulSessionTest.java b/drools-persistence-jpa/src/test/java/org/kie/persistence/session/JpaPersistentStatefulSessionTest.java
index 63adabf3eba..6767d252051 100644
--- a/drools-persistence-jpa/src/test/java/org/kie/persistence/session/JpaPersistentStatefulSessionTest.java
+++ b/drools-persistence-jpa/src/test/java/org/kie/persistence/session/JpaPersistentStatefulSessionTest.java
@@ -90,7 +90,7 @@ public void testFactHandleSerialization() {
KieFileSystem kfs = ks.newKieFileSystem().write( "src/main/resources/r1.drl", str );
ks.newKieBuilder( kfs ).buildAll();
- KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultGAV()).getKieBase();
+ KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultReleaseId()).getKieBase();
KieSession ksession = JPAKnowledgeService.newStatefulKnowledgeSession( kbase, null, env );
List<?> list = new ArrayList<Object>();
@@ -148,7 +148,7 @@ public void testLocalTransactionPerStatement() {
KieFileSystem kfs = ks.newKieFileSystem().write( "src/main/resources/r1.drl", str );
ks.newKieBuilder( kfs ).buildAll();
- KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultGAV()).getKieBase();
+ KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultReleaseId()).getKieBase();
KieSession ksession = JPAKnowledgeService.newStatefulKnowledgeSession( kbase, null, env );
List<?> list = new ArrayList<Object>();
@@ -185,7 +185,7 @@ public void testUserTransactions() throws Exception {
KieFileSystem kfs = ks.newKieFileSystem().write( "src/main/resources/r1.drl", str );
ks.newKieBuilder( kfs ).buildAll();
- KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultGAV()).getKieBase();
+ KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultReleaseId()).getKieBase();
UserTransaction ut = (UserTransaction) new InitialContext().lookup( "java:comp/UserTransaction" );
ut.begin();
@@ -270,7 +270,7 @@ public void testInterceptor() {
KieFileSystem kfs = ks.newKieFileSystem().write( "src/main/resources/r1.drl", str );
ks.newKieBuilder( kfs ).buildAll();
- KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultGAV()).getKieBase();
+ KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultReleaseId()).getKieBase();
KieSession ksession = JPAKnowledgeService.newStatefulKnowledgeSession( kbase, null, env );
SingleSessionCommandService sscs = (SingleSessionCommandService)
@@ -306,7 +306,7 @@ public void testSetFocus() {
KieFileSystem kfs = ks.newKieFileSystem().write( "src/main/resources/r1.drl", str );
ks.newKieBuilder( kfs ).buildAll();
- KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultGAV()).getKieBase();
+ KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultReleaseId()).getKieBase();
KieSession ksession = JPAKnowledgeService.newStatefulKnowledgeSession( kbase, null, env );
List<?> list = new ArrayList<Object>();
@@ -328,7 +328,7 @@ public void testSetFocus() {
@Test
public void testSharedReferences() {
KieServices ks = KieServices.Factory.get();
- KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultGAV()).getKieBase();
+ KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultReleaseId()).getKieBase();
KieSession ksession = JPAKnowledgeService.newStatefulKnowledgeSession( kbase, null, env );
Person x = new Person( "test" );
@@ -357,7 +357,7 @@ public void testSharedReferences() {
public void testMergeConfig() {
// JBRULES-3155
KieServices ks = KieServices.Factory.get();
- KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultGAV()).getKieBase();
+ KieBase kbase = ks.newKieContainer(ks.getRepository().getDefaultReleaseId()).getKieBase();
Properties properties = new Properties();
properties.put("drools.processInstanceManagerFactory", "com.example.CustomJPAProcessInstanceManagerFactory");
diff --git a/kie-ci/src/main/java/org/drools/scanner/ArtifactResolver.java b/kie-ci/src/main/java/org/drools/scanner/ArtifactResolver.java
index fefc3ec0f27..4df88728d64 100644
--- a/kie-ci/src/main/java/org/drools/scanner/ArtifactResolver.java
+++ b/kie-ci/src/main/java/org/drools/scanner/ArtifactResolver.java
@@ -2,7 +2,7 @@
import org.apache.maven.project.MavenProject;
import org.drools.scanner.embedder.EmbeddedPomParser;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.sonatype.aether.artifact.Artifact;
import java.io.File;
@@ -51,8 +51,8 @@ Collection<DependencyDescriptor> getAllDependecies() {
return dependencies;
}
- public static ArtifactResolver getResolverFor(GAV gav, boolean allowDefaultPom) {
- MavenProject mavenProject = getMavenProjectForGAV(gav);
+ public static ArtifactResolver getResolverFor(ReleaseId releaseId, boolean allowDefaultPom) {
+ MavenProject mavenProject = getMavenProjectForGAV(releaseId);
return mavenProject == null ?
(allowDefaultPom ? new ArtifactResolver() : null) :
new ArtifactResolver(mavenProject);
@@ -67,8 +67,8 @@ public static ArtifactResolver getResolverFor(File pomFile) {
return new ArtifactResolver(mavenProject);
}
- static MavenProject getMavenProjectForGAV(GAV gav) {
- String artifactName = gav.getGroupId() + ":" + gav.getArtifactId() + ":pom:" + gav.getVersion();
+ static MavenProject getMavenProjectForGAV(ReleaseId releaseId) {
+ String artifactName = releaseId.getGroupId() + ":" + releaseId.getArtifactId() + ":pom:" + releaseId.getVersion();
Artifact artifact = MavenRepository.getMavenRepository().resolveArtifact(artifactName);
return artifact != null ? parseMavenPom(artifact.getFile()) : null;
}
diff --git a/kie-ci/src/main/java/org/drools/scanner/DependencyDescriptor.java b/kie-ci/src/main/java/org/drools/scanner/DependencyDescriptor.java
index bb664538a27..968ccd1aecd 100644
--- a/kie-ci/src/main/java/org/drools/scanner/DependencyDescriptor.java
+++ b/kie-ci/src/main/java/org/drools/scanner/DependencyDescriptor.java
@@ -3,8 +3,8 @@
import org.apache.maven.artifact.versioning.ArtifactVersion;
import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.apache.maven.model.Dependency;
-import org.drools.kproject.GAVImpl;
-import org.kie.builder.GAV;
+import org.drools.kproject.ReleaseIdImpl;
+import org.kie.builder.ReleaseId;
import org.sonatype.aether.artifact.Artifact;
public class DependencyDescriptor {
@@ -38,10 +38,10 @@ public DependencyDescriptor(String groupId, String artifactId, String version, S
artifactVersion = new DefaultArtifactVersion(version);
}
- public DependencyDescriptor(GAV gav) {
- groupId = gav.getGroupId();
- artifactId = gav.getArtifactId();
- version = gav.getVersion();
+ public DependencyDescriptor(ReleaseId releaseId) {
+ groupId = releaseId.getGroupId();
+ artifactId = releaseId.getArtifactId();
+ version = releaseId.getVersion();
type = "jar";
artifactVersion = new DefaultArtifactVersion(version);
}
@@ -58,8 +58,8 @@ public String getVersion() {
return version;
}
- public GAV getGav() {
- return new GAVImpl(groupId, artifactId, version);
+ public ReleaseId getGav() {
+ return new ReleaseIdImpl(groupId, artifactId, version);
}
public String getType() {
diff --git a/kie-ci/src/main/java/org/drools/scanner/KieModuleMetaData.java b/kie-ci/src/main/java/org/drools/scanner/KieModuleMetaData.java
index f3af0cec34a..bab133533ed 100644
--- a/kie-ci/src/main/java/org/drools/scanner/KieModuleMetaData.java
+++ b/kie-ci/src/main/java/org/drools/scanner/KieModuleMetaData.java
@@ -1,6 +1,6 @@
package org.drools.scanner;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import java.io.File;
import java.util.Collection;
@@ -14,8 +14,8 @@ public interface KieModuleMetaData {
Class<?> getClass(String pkgName, String className);
public static class Factory {
- public static KieModuleMetaData newKieModuleMetaData(GAV gav) {
- return new KieModuleMetaDataImpl(gav);
+ public static KieModuleMetaData newKieModuleMetaData(ReleaseId releaseId) {
+ return new KieModuleMetaDataImpl(releaseId);
}
public KieModuleMetaData newKieModuleMetaDataImpl(File pomFile) {
diff --git a/kie-ci/src/main/java/org/drools/scanner/KieModuleMetaDataImpl.java b/kie-ci/src/main/java/org/drools/scanner/KieModuleMetaDataImpl.java
index 10198b0c2f2..d437cd6600c 100644
--- a/kie-ci/src/main/java/org/drools/scanner/KieModuleMetaDataImpl.java
+++ b/kie-ci/src/main/java/org/drools/scanner/KieModuleMetaDataImpl.java
@@ -1,6 +1,6 @@
package org.drools.scanner;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.sonatype.aether.artifact.Artifact;
import java.io.File;
@@ -30,11 +30,11 @@ public class KieModuleMetaDataImpl implements KieModuleMetaData {
private URLClassLoader classLoader;
- private GAV gav;
+ private ReleaseId releaseId;
- public KieModuleMetaDataImpl(GAV gav) {
- this.artifactResolver = getResolverFor(gav, false);
- this.gav = gav;
+ public KieModuleMetaDataImpl(ReleaseId releaseId) {
+ this.artifactResolver = getResolverFor(releaseId, false);
+ this.releaseId = releaseId;
init();
}
@@ -77,8 +77,8 @@ private ClassLoader getClassLoader() {
}
private void init() {
- if (gav != null) {
- addArtifact(artifactResolver.resolveArtifact(gav.toString()));
+ if (releaseId != null) {
+ addArtifact(artifactResolver.resolveArtifact(releaseId.toString()));
}
for (DependencyDescriptor dep : artifactResolver.getAllDependecies()) {
addArtifact(artifactResolver.resolveArtifact(dep.toString()));
diff --git a/kie-ci/src/main/java/org/drools/scanner/KieRepositoryScannerImpl.java b/kie-ci/src/main/java/org/drools/scanner/KieRepositoryScannerImpl.java
index 0d8164831f5..c153caa0d26 100644
--- a/kie-ci/src/main/java/org/drools/scanner/KieRepositoryScannerImpl.java
+++ b/kie-ci/src/main/java/org/drools/scanner/KieRepositoryScannerImpl.java
@@ -1,7 +1,7 @@
package org.drools.scanner;
import org.drools.kproject.models.KieModuleModelImpl;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieModule;
import org.kie.builder.KieScanner;
import org.kie.builder.Message;
@@ -44,12 +44,12 @@ public class KieRepositoryScannerImpl implements InternalKieScanner {
public void setKieContainer(KieContainer kieContainer) {
this.kieContainer = kieContainer;
- DependencyDescriptor projectDescr = new DependencyDescriptor(kieContainer.getGAV());
+ DependencyDescriptor projectDescr = new DependencyDescriptor(kieContainer.getReleaseId());
if (!projectDescr.isFixedVersion()) {
usedDependencies.add(projectDescr);
}
- artifactResolver = getResolverFor(kieContainer.getGAV(), true);
+ artifactResolver = getResolverFor(kieContainer.getReleaseId(), true);
init();
}
@@ -70,27 +70,27 @@ private void init() {
indexAtifacts(artifacts);
}
- public KieModule loadArtifact(GAV gav) {
- String artifactName = gav.toString();
+ public KieModule loadArtifact(ReleaseId releaseId) {
+ String artifactName = releaseId.toString();
Artifact artifact = getArtifactResolver().resolveArtifact(artifactName);
- return artifact != null ? buildArtifact(gav, artifact) : loadPomArtifact(gav);
+ return artifact != null ? buildArtifact(releaseId, artifact) : loadPomArtifact(releaseId);
}
- private KieModule loadPomArtifact(GAV gav) {
- ArtifactResolver resolver = getResolverFor(gav, false);
+ private KieModule loadPomArtifact(ReleaseId releaseId) {
+ ArtifactResolver resolver = getResolverFor(releaseId, false);
if (resolver == null) {
return null;
}
- MemoryKieModule kieModule = new MemoryKieModule(gav);
+ MemoryKieModule kieModule = new MemoryKieModule(releaseId);
addDependencies(kieModule, resolver, resolver.getPomDirectDependencies());
build(kieModule);
return kieModule;
}
- private InternalKieModule buildArtifact(GAV gav, Artifact artifact) {
+ private InternalKieModule buildArtifact(ReleaseId releaseId, Artifact artifact) {
ArtifactResolver resolver = getArtifactResolver();
- ZipKieModule kieModule = new ZipKieModule(gav, artifact.getFile());
+ ZipKieModule kieModule = new ZipKieModule(releaseId, artifact.getFile());
addDependencies(kieModule, resolver, resolver.getArtifactDependecies(new DependencyDescriptor(artifact).toString()));
build(kieModule);
return kieModule;
@@ -100,8 +100,8 @@ private void addDependencies(InternalKieModule kieModule, ArtifactResolver resol
for (DependencyDescriptor dep : dependencies) {
Artifact depArtifact = resolver.resolveArtifact(dep.toString());
if (isKJar(depArtifact.getFile())) {
- GAV depGav = new DependencyDescriptor(depArtifact).getGav();
- kieModule.addDependency(new ZipKieModule(depGav, depArtifact.getFile()));
+ ReleaseId depReleaseId = new DependencyDescriptor(depArtifact).getGav();
+ kieModule.addDependency(new ZipKieModule(depReleaseId, depArtifact.getFile()));
}
}
}
@@ -154,11 +154,11 @@ public void scanNow() {
log.info("The following artifacts have been updated: " + updatedArtifacts);
}
- private void updateKieModule(Artifact artifact, GAV gav) {
- ZipKieModule kieModule = new ZipKieModule(gav, artifact.getFile());
+ private void updateKieModule(Artifact artifact, ReleaseId releaseId) {
+ ZipKieModule kieModule = new ZipKieModule(releaseId, artifact.getFile());
ResultsImpl messages = build(kieModule);
if ( messages.filterMessages(Message.Level.ERROR).isEmpty()) {
- kieContainer.updateToVersion(gav);
+ kieContainer.updateToVersion(releaseId);
}
}
diff --git a/kie-ci/src/main/java/org/drools/scanner/MavenRepository.java b/kie-ci/src/main/java/org/drools/scanner/MavenRepository.java
index 65dc36eac36..37035e3a1ca 100644
--- a/kie-ci/src/main/java/org/drools/scanner/MavenRepository.java
+++ b/kie-ci/src/main/java/org/drools/scanner/MavenRepository.java
@@ -1,7 +1,7 @@
package org.drools.scanner;
import org.apache.maven.project.MavenProject;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.impl.InternalKieModule;
import org.sonatype.aether.artifact.Artifact;
import org.sonatype.aether.collection.CollectRequest;
@@ -85,8 +85,8 @@ public Artifact resolveArtifact(String artifactName) {
return artifactResult.getArtifact();
}
- public void deployArtifact(GAV gav, InternalKieModule kieModule, File pomfile) {
- File jarFile = new File( System.getProperty( "java.io.tmpdir" ), gav + ".jar");
+ public void deployArtifact(ReleaseId releaseId, InternalKieModule kieModule, File pomfile) {
+ File jarFile = new File( System.getProperty( "java.io.tmpdir" ), releaseId + ".jar");
try {
FileOutputStream fos = new FileOutputStream(jarFile);
fos.write(kieModule.getBytes());
@@ -95,11 +95,11 @@ public void deployArtifact(GAV gav, InternalKieModule kieModule, File pomfile) {
} catch (IOException e) {
throw new RuntimeException(e);
}
- deployArtifact(gav, jarFile, pomfile);
+ deployArtifact(releaseId, jarFile, pomfile);
}
- public void deployArtifact(GAV gav, File jar, File pomfile) {
- Artifact jarArtifact = new DefaultArtifact( gav.getGroupId(), gav.getArtifactId(), "jar", gav.getVersion() );
+ public void deployArtifact(ReleaseId releaseId, File jar, File pomfile) {
+ Artifact jarArtifact = new DefaultArtifact( releaseId.getGroupId(), releaseId.getArtifactId(), "jar", releaseId.getVersion() );
jarArtifact = jarArtifact.setFile( jar );
Artifact pomArtifact = new SubArtifact( jarArtifact, "", "pom" );
diff --git a/kie-ci/src/test/java/org/drools/scanner/KieModuleMetaDataTest.java b/kie-ci/src/test/java/org/drools/scanner/KieModuleMetaDataTest.java
index 563f7292448..f64be1188ed 100644
--- a/kie-ci/src/test/java/org/drools/scanner/KieModuleMetaDataTest.java
+++ b/kie-ci/src/test/java/org/drools/scanner/KieModuleMetaDataTest.java
@@ -2,7 +2,7 @@
import org.junit.Ignore;
import org.junit.Test;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.KieServices;
import static junit.framework.Assert.assertEquals;
@@ -12,8 +12,8 @@ public class KieModuleMetaDataTest {
@Test @Ignore
public void testKScanner() throws Exception {
- GAV gav = KieServices.Factory.get().newGav("org.drools", "drools-core", "5.5.0.Final");
- KieModuleMetaData kieModuleMetaData = KieModuleMetaData.Factory.newKieModuleMetaData(gav);
+ ReleaseId releaseId = KieServices.Factory.get().newReleaseId("org.drools", "drools-core", "5.5.0.Final");
+ KieModuleMetaData kieModuleMetaData = KieModuleMetaData.Factory.newKieModuleMetaData(releaseId);
assertEquals(17, kieModuleMetaData.getClasses("org.drools.runtime").size());
diff --git a/kie-ci/src/test/java/org/drools/scanner/KieRepositoryScannerTest.java b/kie-ci/src/test/java/org/drools/scanner/KieRepositoryScannerTest.java
index 1cfdb9aa6a9..5fc70cb6fd3 100644
--- a/kie-ci/src/test/java/org/drools/scanner/KieRepositoryScannerTest.java
+++ b/kie-ci/src/test/java/org/drools/scanner/KieRepositoryScannerTest.java
@@ -6,7 +6,7 @@
import org.junit.Ignore;
import org.junit.Test;
import org.kie.KieServices;
-import org.kie.builder.GAV;
+import org.kie.builder.ReleaseId;
import org.kie.builder.KieBaseModel;
import org.kie.builder.KieBuilder;
import org.kie.builder.KieFileSystem;
@@ -39,8 +39,8 @@ public class KieRepositoryScannerTest {
public void setUp() throws Exception {
this.fileManager = new FileManager();
this.fileManager.setUp();
- GAV gav = KieServices.Factory.get().newGav("org.kie", "scanner-test", "1.0-SNAPSHOT");
- kPom = createKPom(gav);
+ ReleaseId releaseId = KieServices.Factory.get().newReleaseId("org.kie", "scanner-test", "1.0-SNAPSHOT");
+ kPom = createKPom(releaseId);
}
@After
@@ -57,23 +57,23 @@ private void resetFileManager() {
@Test @Ignore
public void testKScanner() throws Exception {
KieServices ks = KieServices.Factory.get();
- GAV gav = ks.newGav("org.kie", "scanner-test", "1.0-SNAPSHOT");
+ ReleaseId releaseId = ks.newReleaseId("org.kie", "scanner-test", "1.0-SNAPSHOT");
- InternalKieModule kJar1 = createKieJar(ks, gav, "rule1", "rule2");
- KieContainer kieContainer = ks.newKieContainer(gav);
+ InternalKieModule kJar1 = createKieJar(ks, releaseId, "rule1", "rule2");
+ KieContainer kieContainer = ks.newKieContainer(releaseId);
MavenRepository repository = getMavenRepository();
- repository.deployArtifact(gav, kJar1, kPom);
+ repository.deployArtifact(releaseId, kJar1, kPom);
// create a ksesion and check it works as expected
KieSession ksession = kieContainer.newKieSession("KSession1");
checkKSession(ksession, "rule1", "rule2");
// create a new kjar
- InternalKieModule kJar2 = createKieJar(ks, gav, "rule2", "rule3");
+ InternalKieModule kJar2 = createKieJar(ks, releaseId, "rule2", "rule3");
// deploy it on maven
- repository.deployArtifact(gav, kJar2, kPom);
+ repository.deployArtifact(releaseId, kJar2, kPom);
// since I am not calling start() on the scanner it means it won't have automatic scheduled scanning
KieScanner scanner = ks.newKieScanner(kieContainer);
@@ -89,21 +89,21 @@ public void testKScanner() throws Exception {
@Test @Ignore
public void testKScannerWithKJarContainingClasses() throws Exception {
KieServices ks = KieServices.Factory.get();
- GAV gav = ks.newGav("org.kie", "scanner-test", "1.0-SNAPSHOT");
+ ReleaseId releaseId = ks.newReleaseId("org.kie", "scanner-test", "1.0-SNAPSHOT");
- InternalKieModule kJar1 = createKieJarWithClass(ks, gav, 2, 7);
+ InternalKieModule kJar1 = createKieJarWithClass(ks, releaseId, 2, 7);
MavenRepository repository = getMavenRepository();
- repository.deployArtifact(gav, kJar1, kPom);
+ repository.deployArtifact(releaseId, kJar1, kPom);
- KieContainer kieContainer = ks.newKieContainer(gav);
+ KieContainer kieContainer = ks.newKieContainer(releaseId);
KieScanner scanner = ks.newKieScanner(kieContainer);
KieSession ksession = kieContainer.newKieSession("KSession1");
checkKSession(ksession, 14);
- InternalKieModule kJar2 = createKieJarWithClass(ks, gav, 3, 5);
- repository.deployArtifact(gav, kJar2, kPom);
+ InternalKieModule kJar2 = createKieJarWithClass(ks, releaseId, 3, 5);
+ repository.deployArtifact(releaseId, kJar2, kPom);
scanner.scanNow();
@@ -116,7 +116,7 @@ public void testLoadKieJarFromMavenRepo() throws Exception {
// This test depends from the former one (UGLY!) and must be run immediately after it
KieServices ks = KieServices.Factory.get();
- KieContainer kieContainer = ks.newKieContainer(ks.newGav("org.kie", "scanner-test", "1.0-SNAPSHOT"));
+ KieContainer kieContainer = ks.newKieContainer(ks.newReleaseId("org.kie", "scanner-test", "1.0-SNAPSHOT"));
KieSession ksession2 = kieContainer.newKieSession("KSession1");
checkKSession(ksession2, 15);
@@ -125,25 +125,25 @@ public void testLoadKieJarFromMavenRepo() throws Exception {
@Test @Ignore
public void testScannerOnPomProject() throws Exception {
KieServices ks = KieServices.Factory.get();
- GAV gav1 = ks.newGav("org.kie", "scanner-test", "1.0");
- GAV gav2 = ks.newGav("org.kie", "scanner-test", "2.0");
+ ReleaseId releaseId1 = ks.newReleaseId("org.kie", "scanner-test", "1.0");
+ ReleaseId releaseId2 = ks.newReleaseId("org.kie", "scanner-test", "2.0");
MavenRepository repository = getMavenRepository();
repository.deployPomArtifact("org.kie", "scanner-master-test", "1.0", createMasterKPom());
resetFileManager();
- InternalKieModule kJar1 = createKieJarWithClass(ks, gav1, 2, 7);
- repository.deployArtifact(gav1, kJar1, createKPom(gav1));
+ InternalKieModule kJar1 = createKieJarWithClass(ks, releaseId1, 2, 7);
+ repository.deployArtifact(releaseId1, kJar1, createKPom(releaseId1));
- KieContainer kieContainer = ks.newKieContainer(ks.newGav("org.kie", "scanner-master-test", "LATEST"));
+ KieContainer kieContainer = ks.newKieContainer(ks.newReleaseId("org.kie", "scanner-master-test", "LATEST"));
KieSession ksession = kieContainer.newKieSession("KSession1");
checkKSession(ksession, 14);
KieScanner scanner = ks.newKieScanner(kieContainer);
- InternalKieModule kJar2 = createKieJarWithClass(ks, gav2, 3, 5);
- repository.deployArtifact(gav2, kJar2, createKPom(gav1));
+ InternalKieModule kJar2 = createKieJarWithClass(ks, releaseId2, 3, 5);
+ repository.deployArtifact(releaseId2, kJar2, createKPom(releaseId1));
scanner.scanNow();
@@ -151,21 +151,21 @@ public void testScannerOnPomProject() throws Exception {
checkKSession(ksession2, 15);
}
- private File createKPom(GAV gav) throws IOException {
+ private File createKPom(ReleaseId releaseId) throws IOException {
File pomFile = fileManager.newFile("pom.xml");
- fileManager.write(pomFile, getPom(gav));
+ fileManager.write(pomFile, getPom(releaseId));
return pomFile;
}
- private String getPom(GAV gav) {
+ private String getPom(ReleaseId releaseId) {
return "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n" +
" <modelVersion>4.0.0</modelVersion>\n" +
"\n" +
- " <groupId>" + gav.getGroupId() + "</groupId>\n" +
- " <artifactId>" + gav.getArtifactId() + "</artifactId>\n" +
- " <version>" + gav.getVersion() + "</version>\n" +
+ " <groupId>" + releaseId.getGroupId() + "</groupId>\n" +
+ " <artifactId>" + releaseId.getArtifactId() + "</artifactId>\n" +
+ " <version>" + releaseId.getVersion() + "</version>\n" +
"\n" +
"</project>";
}
@@ -196,7 +196,7 @@ private File createMasterKPom() throws IOException {
return pomFile;
}
- private InternalKieModule createKieJar(KieServices ks, GAV gav, String... rules) throws IOException {
+ private InternalKieModule createKieJar(KieServices ks, ReleaseId releaseId, String... rules) throws IOException {
KieFileSystem kfs = ks.newKieFileSystem();
for (String rule : rules) {
String file = "org/test/" + rule + ".drl";
@@ -214,7 +214,7 @@ private InternalKieModule createKieJar(KieServices ks, GAV gav, String... rules)
.setClockType( ClockTypeOption.get("realtime") );
kfs.writeKModuleXML(kproj.toXML());
- kfs.writePomXML( getPom(gav) );
+ kfs.writePomXML( getPom(releaseId) );
KieBuilder kieBuilder = ks.newKieBuilder(kfs);
assertTrue(kieBuilder.buildAll().getResults().getMessages().isEmpty());
@@ -243,7 +243,7 @@ private void checkKSession(KieSession ksession, Object... results) {
}
}
- private InternalKieModule createKieJarWithClass(KieServices ks, GAV gav, int value, int factor) throws IOException {
+ private InternalKieModule createKieJarWithClass(KieServices ks, ReleaseId releaseId, int value, int factor) throws IOException {
KieFileSystem kieFileSystem = ks.newKieFileSystem();
KieModuleModel kproj = ks.newKieModuleModel();
@@ -258,7 +258,7 @@ private InternalKieModule createKieJarWithClass(KieServices ks, GAV gav, int val
kieFileSystem
.writeKModuleXML(kproj.toXML())
- .writePomXML(getPom(gav))
+ .writePomXML(getPom(releaseId))
.write("src/main/resources/" + kieBaseModel1.getName() + "/rule1.drl", createDRLForJavaSource(value))
.write("src/main/java/org/kie/test/Bean.java", createJavaSource(factor));
|
626a84a93585cdde6719875ffd0fdc6d3e02af6f
|
kotlin
|
reimplemented extension literal definition and- calls to comply with jquery conventions--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/translator/src/org/jetbrains/k2js/translate/context/StandardClasses.java b/translator/src/org/jetbrains/k2js/translate/context/StandardClasses.java
index 2f831318824c2..fe520679c4283 100644
--- a/translator/src/org/jetbrains/k2js/translate/context/StandardClasses.java
+++ b/translator/src/org/jetbrains/k2js/translate/context/StandardClasses.java
@@ -103,8 +103,9 @@ public static StandardClasses bindImplementations(@NotNull JsScope kotlinObjectS
private static void declareJQuery(@NotNull StandardClasses standardClasses) {
standardClasses.declare().forFQ("jquery.JQuery").externalClass("jQuery")
- .methods("addClass", "attr", "hasClass", "append");
+ .methods("addClass", "attr", "hasClass", "append", "text", "ready");
standardClasses.declare().forFQ("jquery.jq").externalFunction("jQuery");
+ standardClasses.declare().forFQ("jquery.get-document").externalObject("document");
}
//TODO: test all the methods
@@ -142,6 +143,8 @@ private static void declareJetObjects(@NotNull StandardClasses standardClasses)
standardClasses.declare().forFQ("jet.String").kotlinClass("String").
properties("length");
+
+ standardClasses.declare().forFQ("jet.Any.toString").kotlinFunction("toString");
}
private static void declareTopLevelFunctions(@NotNull StandardClasses standardClasses) {
diff --git a/translator/src/org/jetbrains/k2js/translate/expression/FunctionTranslator.java b/translator/src/org/jetbrains/k2js/translate/expression/FunctionTranslator.java
index 1760c73ffaf3a..77a370b169f9a 100644
--- a/translator/src/org/jetbrains/k2js/translate/expression/FunctionTranslator.java
+++ b/translator/src/org/jetbrains/k2js/translate/expression/FunctionTranslator.java
@@ -1,5 +1,6 @@
package org.jetbrains.k2js.translate.expression;
+
import com.google.dart.compiler.backend.js.ast.*;
import com.google.dart.compiler.util.AstUtil;
import org.jetbrains.annotations.NotNull;
@@ -195,7 +196,7 @@ private void mayBeAddThisParameterForExtensionFunction(@NotNull List<JsParameter
}
private boolean isExtensionFunction() {
- return DescriptorUtils.isExtensionFunction(descriptor);
+ return DescriptorUtils.isExtensionFunction(descriptor) && !isLiteral();
}
private boolean isLiteral() {
diff --git a/translator/src/org/jetbrains/k2js/translate/reference/CallTranslator.java b/translator/src/org/jetbrains/k2js/translate/reference/CallTranslator.java
index c89fe93decab1..f0973d2906e6f 100644
--- a/translator/src/org/jetbrains/k2js/translate/reference/CallTranslator.java
+++ b/translator/src/org/jetbrains/k2js/translate/reference/CallTranslator.java
@@ -2,6 +2,7 @@
import com.google.dart.compiler.backend.js.ast.JsExpression;
import com.google.dart.compiler.backend.js.ast.JsInvocation;
+import com.google.dart.compiler.backend.js.ast.JsNameRef;
import com.google.dart.compiler.backend.js.ast.JsNew;
import com.google.dart.compiler.util.AstUtil;
import org.jetbrains.annotations.NotNull;
@@ -129,7 +130,6 @@ private static JetExpression getActualArgument(
public static JsExpression translate(@Nullable JsExpression receiver,
@NotNull CallableDescriptor descriptor,
@NotNull TranslationContext context) {
- //TODO: HACK!
return translate(receiver, Collections.<JsExpression>emptyList(),
ResolvedCallImpl.create(descriptor), null, context);
}
@@ -224,12 +224,37 @@ private JsExpression translate() {
if (isConstructor()) {
return constructorCall();
}
+ if (isExtensionFunctionLiteral()) {
+ return extensionFunctionLiteralCall();
+ }
if (isExtensionFunction()) {
return extensionFunctionCall();
}
return methodCall();
}
+ @NotNull
+ private JsExpression extensionFunctionLiteralCall() {
+
+ List<JsExpression> callArguments = new ArrayList<JsExpression>();
+ assert receiver != null;
+ callArguments.add(thisObject());
+ callArguments.addAll(arguments);
+ receiver = null;
+ JsNameRef callMethodNameRef = AstUtil.newQualifiedNameRef("call");
+ JsInvocation callMethodInvocation = new JsInvocation();
+ callMethodInvocation.setQualifier(callMethodNameRef);
+ AstUtil.setQualifier(callMethodInvocation, calleeReference());
+ callMethodInvocation.setArguments(callArguments);
+ return callMethodInvocation;
+ }
+
+ private boolean isExtensionFunctionLiteral() {
+ boolean isLiteral = descriptor instanceof VariableAsFunctionDescriptor
+ || descriptor instanceof ExpressionAsFunctionDescriptor;
+ return isExtensionFunction() && isLiteral;
+ }
+
@NotNull
private JsExpression extensionFunctionCall() {
receiver = getExtensionFunctionCallReceiver();
@@ -257,8 +282,10 @@ private JsExpression getExtensionFunctionCallReceiver() {
return getThisObject(context(), expectedReceiverDescriptor);
}
+ @SuppressWarnings("UnnecessaryLocalVariable")
private boolean isExtensionFunction() {
- return resolvedCall.getReceiverArgument().exists();
+ boolean hasReceiver = resolvedCall.getReceiverArgument().exists();
+ return hasReceiver;
}
@NotNull
diff --git a/translator/src/org/jetbrains/k2js/utils/JetFileUtils.java b/translator/src/org/jetbrains/k2js/utils/JetFileUtils.java
index 119592566fb64..e06a7e9e1774f 100644
--- a/translator/src/org/jetbrains/k2js/utils/JetFileUtils.java
+++ b/translator/src/org/jetbrains/k2js/utils/JetFileUtils.java
@@ -1,5 +1,6 @@
package org.jetbrains.k2js.utils;
+import com.intellij.openapi.Disposable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
@@ -10,6 +11,7 @@
import com.intellij.testFramework.LightVirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
+import org.jetbrains.jet.compiler.JetCoreEnvironment;
import org.jetbrains.jet.lang.psi.JetFile;
import org.jetbrains.jet.plugin.JetLanguage;
@@ -21,13 +23,13 @@
*/
public final class JetFileUtils {
-// @NotNull
-// private static JetCoreEnvironment testOnlyEnvironment = new JetCoreEnvironment(new Disposable() {
-//
-// @Override
-// public void dispose() {
-// }
-// });
+ @NotNull
+ private static JetCoreEnvironment testOnlyEnvironment = new JetCoreEnvironment(new Disposable() {
+
+ @Override
+ public void dispose() {
+ }
+ });
@NotNull
public static String loadFile(@NotNull String path) throws IOException {
@@ -63,7 +65,8 @@ private static PsiFile createFile(@NotNull String name, @NotNull String text, @N
virtualFile.setCharset(CharsetToolkit.UTF8_CHARSET);
Project realProject = project;
if (realProject == null) {
- throw new RuntimeException();
+ realProject = testOnlyEnvironment.getProject();
+ //throw new RuntimeException();
}
PsiFile result = ((PsiFileFactoryImpl) PsiFileFactory.getInstance(realProject))
.trySetupPsiForFile(virtualFile, JetLanguage.INSTANCE, true, false);
diff --git a/translator/testFiles/kotlin_lib.js b/translator/testFiles/kotlin_lib.js
index 2eff88d9e4489..242b721c62bd8 100644
--- a/translator/testFiles/kotlin_lib.js
+++ b/translator/testFiles/kotlin_lib.js
@@ -902,6 +902,9 @@ Kotlin.StringBuilder = Kotlin.Class.create(
}
);
+Kotlin.toString = function(obj) {
+ return obj.toString();
+};
/**
* Copyright 2010 Tim Down.
|
44a27c5cd76f44e435671c69d1d8f60c42a2b420
|
hbase
|
HBASE-11920 Add CP hooks for ReplicationEndPoint--
|
a
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
index 768481d55496..a6b9d848b212 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
@@ -19,23 +19,23 @@
package org.apache.hadoop.hbase.coprocessor;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
-import java.io.IOException;
-import java.util.List;
-
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@InterfaceStability.Evolving
public abstract class BaseMasterAndRegionObserver extends BaseRegionObserver
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionServerObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionServerObserver.java
index 5bc23d3e6711..c21cdf884dc1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionServerObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionServerObserver.java
@@ -24,6 +24,7 @@
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
/**
* An abstract class that implements RegionServerObserver.
@@ -76,4 +77,10 @@ public void preRollWALWriterRequest(ObserverContext<RegionServerCoprocessorEnvir
public void postRollWALWriterRequest(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
throws IOException { }
+ @Override
+ public ReplicationEndpoint postCreateReplicationEndPoint(
+ ObserverContext<RegionServerCoprocessorEnvironment> ctx, ReplicationEndpoint endpoint) {
+ return endpoint;
+ }
+
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
index 8a76d46d04f9..5c07fd2180a3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
@@ -25,6 +25,7 @@
import org.apache.hadoop.hbase.MetaMutationAnnotation;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
public interface RegionServerObserver extends Coprocessor {
@@ -121,4 +122,13 @@ void preRollWALWriterRequest(final ObserverContext<RegionServerCoprocessorEnviro
void postRollWALWriterRequest(final ObserverContext<RegionServerCoprocessorEnvironment> ctx)
throws IOException;
+ /**
+ * This will be called after the replication endpoint is instantiated.
+ * @param ctx
+ * @param endpoint - the base endpoint for replication
+ * @return the endpoint to use during replication.
+ */
+ ReplicationEndpoint postCreateReplicationEndPoint(
+ ObserverContext<RegionServerCoprocessorEnvironment> ctx, ReplicationEndpoint endpoint);
+
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
index 54552c622fe6..ec44560c32a6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
@@ -34,6 +34,7 @@
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
+import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@InterfaceStability.Evolving
@@ -156,6 +157,27 @@ public void call(RegionServerObserver oserver,
});
}
+ public ReplicationEndpoint postCreateReplicationEndPoint(final ReplicationEndpoint endpoint)
+ throws IOException {
+ return execOperationWithResult(endpoint, coprocessors.isEmpty() ? null
+ : new CoprocessOperationWithResult<ReplicationEndpoint>() {
+ @Override
+ public void call(RegionServerObserver oserver,
+ ObserverContext<RegionServerCoprocessorEnvironment> ctx) throws IOException {
+ setResult(oserver.postCreateReplicationEndPoint(ctx, getResult()));
+ }
+ });
+ }
+
+ private <T> T execOperationWithResult(final T defaultValue,
+ final CoprocessOperationWithResult<T> ctx) throws IOException {
+ if (ctx == null)
+ return defaultValue;
+ ctx.setResult(defaultValue);
+ execOperation(ctx);
+ return ctx.getResult();
+ }
+
private static abstract class CoprocessorOperation
extends ObserverContext<RegionServerCoprocessorEnvironment> {
public CoprocessorOperation() {
@@ -168,6 +190,18 @@ public void postEnvCall(RegionServerEnvironment env) {
}
}
+ private static abstract class CoprocessOperationWithResult<T> extends CoprocessorOperation {
+ private T result = null;
+
+ public void setResult(final T result) {
+ this.result = result;
+ }
+
+ public T getResult() {
+ return this.result;
+ }
+ }
+
private boolean execOperation(final CoprocessorOperation ctx) throws IOException {
if (ctx == null) return false;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
index a2f16675e6db..cb0f6ce68ff1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
@@ -39,11 +39,13 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Stoppable;
+import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.hadoop.hbase.replication.ReplicationListener;
@@ -84,7 +86,7 @@ public class ReplicationSourceManager implements ReplicationListener {
// UUID for this cluster
private final UUID clusterId;
// All about stopping
- private final Stoppable stopper;
+ private final Server server;
// All logs we are currently tracking
private final Map<String, SortedSet<String>> hlogsById;
// Logs for recovered sources we are currently tracking
@@ -111,7 +113,7 @@ public class ReplicationSourceManager implements ReplicationListener {
* @param replicationPeers
* @param replicationTracker
* @param conf the configuration to use
- * @param stopper the stopper object for this region server
+ * @param server the server for this region server
* @param fs the file system to use
* @param logDir the directory that contains all hlog directories of live RSs
* @param oldLogDir the directory where old logs are archived
@@ -119,7 +121,7 @@ public class ReplicationSourceManager implements ReplicationListener {
*/
public ReplicationSourceManager(final ReplicationQueues replicationQueues,
final ReplicationPeers replicationPeers, final ReplicationTracker replicationTracker,
- final Configuration conf, final Stoppable stopper, final FileSystem fs, final Path logDir,
+ final Configuration conf, final Server server, final FileSystem fs, final Path logDir,
final Path oldLogDir, final UUID clusterId) {
//CopyOnWriteArrayList is thread-safe.
//Generally, reading is more than modifying.
@@ -127,7 +129,7 @@ public ReplicationSourceManager(final ReplicationQueues replicationQueues,
this.replicationQueues = replicationQueues;
this.replicationPeers = replicationPeers;
this.replicationTracker = replicationTracker;
- this.stopper = stopper;
+ this.server = server;
this.hlogsById = new HashMap<String, SortedSet<String>>();
this.hlogsByIdRecoveredQueues = new ConcurrentHashMap<String, SortedSet<String>>();
this.oldsources = new CopyOnWriteArrayList<ReplicationSourceInterface>();
@@ -243,7 +245,7 @@ protected ReplicationSourceInterface addSource(String id) throws IOException,
ReplicationPeer peer = replicationPeers.getPeer(id);
ReplicationSourceInterface src =
getReplicationSource(this.conf, this.fs, this, this.replicationQueues,
- this.replicationPeers, stopper, id, this.clusterId, peerConfig, peer);
+ this.replicationPeers, server, id, this.clusterId, peerConfig, peer);
synchronized (this.hlogsById) {
this.sources.add(src);
this.hlogsById.put(id, new TreeSet<String>());
@@ -257,7 +259,7 @@ protected ReplicationSourceInterface addSource(String id) throws IOException,
String message =
"Cannot add log to queue when creating a new source, queueId="
+ src.getPeerClusterZnode() + ", filename=" + name;
- stopper.stop(message);
+ server.stop(message);
throw e;
}
src.enqueueLog(this.latestPath);
@@ -359,7 +361,7 @@ void postLogRoll(Path newLog) throws IOException {
* @param conf the configuration to use
* @param fs the file system to use
* @param manager the manager to use
- * @param stopper the stopper object for this region server
+ * @param server the server object for this region server
* @param peerId the id of the peer cluster
* @return the created source
* @throws IOException
@@ -367,9 +369,13 @@ void postLogRoll(Path newLog) throws IOException {
protected ReplicationSourceInterface getReplicationSource(final Configuration conf,
final FileSystem fs, final ReplicationSourceManager manager,
final ReplicationQueues replicationQueues, final ReplicationPeers replicationPeers,
- final Stoppable stopper, final String peerId, final UUID clusterId,
+ final Server server, final String peerId, final UUID clusterId,
final ReplicationPeerConfig peerConfig, final ReplicationPeer replicationPeer)
throws IOException {
+ RegionServerCoprocessorHost rsServerHost = null;
+ if (server instanceof HRegionServer) {
+ rsServerHost = ((HRegionServer) server).getRegionServerCoprocessorHost();
+ }
ReplicationSourceInterface src;
try {
@SuppressWarnings("rawtypes")
@@ -392,6 +398,14 @@ protected ReplicationSourceInterface getReplicationSource(final Configuration co
@SuppressWarnings("rawtypes")
Class c = Class.forName(replicationEndpointImpl);
replicationEndpoint = (ReplicationEndpoint) c.newInstance();
+ if(rsServerHost != null) {
+ ReplicationEndpoint newReplicationEndPoint = rsServerHost
+ .postCreateReplicationEndPoint(replicationEndpoint);
+ if(newReplicationEndPoint != null) {
+ // Override the newly created endpoint from the hook with configured end point
+ replicationEndpoint = newReplicationEndPoint;
+ }
+ }
} catch (Exception e) {
LOG.warn("Passed replication endpoint implementation throws errors", e);
throw new IOException(e);
@@ -399,7 +413,7 @@ protected ReplicationSourceInterface getReplicationSource(final Configuration co
MetricsSource metrics = new MetricsSource(peerId);
// init replication source
- src.init(conf, fs, manager, replicationQueues, replicationPeers, stopper, peerId,
+ src.init(conf, fs, manager, replicationQueues, replicationPeers, server, peerId,
clusterId, replicationEndpoint, metrics);
// init replication endpoint
@@ -542,7 +556,7 @@ public void run() {
Thread.currentThread().interrupt();
}
// We try to lock that rs' queue directory
- if (stopper.isStopped()) {
+ if (server.isStopped()) {
LOG.info("Not transferring queue since we are shutting down");
return;
}
@@ -578,7 +592,7 @@ public void run() {
ReplicationSourceInterface src =
getReplicationSource(conf, fs, ReplicationSourceManager.this, this.rq, this.rp,
- stopper, peerId, this.clusterId, peerConfig, peer);
+ server, peerId, this.clusterId, peerConfig, peer);
if (!this.rp.getPeerIds().contains((src.getPeerClusterId()))) {
src.terminate("Recovered queue doesn't belong to any current peer");
break;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 95cd72ac6db0..96c912a2efb0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -30,7 +30,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
@@ -45,13 +45,13 @@
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
+import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
@@ -91,6 +91,7 @@
import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
@@ -2252,4 +2253,10 @@ public void preSetNamespaceQuota(final ObserverContext<MasterCoprocessorEnvironm
final String namespace, final Quotas quotas) throws IOException {
requirePermission("setNamespaceQuota", Action.ADMIN);
}
+
+ @Override
+ public ReplicationEndpoint postCreateReplicationEndPoint(
+ ObserverContext<RegionServerCoprocessorEnvironment> ctx, ReplicationEndpoint endpoint) {
+ return endpoint;
+ }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.