| commit_id (string, 40 chars) | project (string, 11 distinct values) | commit_message (string, 3–3.04k chars) | type (string, 3 distinct values) | url (string, 11 distinct values) | git_diff (string, 555–691k chars) |
|---|---|---|---|---|---|
9aa093303a2580c5cd165e95b0d59062ec9ec835
|
restlet-framework-java
|
Initial code for new default HTTP connector and SIP connector.
|
a
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet/src/org/restlet/engine/http/connector/ClientConnection.java b/modules/org.restlet/src/org/restlet/engine/http/connector/ClientConnection.java
index 0cadd7f228..faafd5fbb2 100644
--- a/modules/org.restlet/src/org/restlet/engine/http/connector/ClientConnection.java
+++ b/modules/org.restlet/src/org/restlet/engine/http/connector/ClientConnection.java
@@ -297,6 +297,8 @@ protected void writeMessageHeadLine(Response message,
headStream.write(' ');
headStream.write(getRequestUri(request.getResourceRef()).getBytes());
headStream.write(' ');
+ headStream.write(request.getProtocol().getName().getBytes());
+ headStream.write('/');
headStream.write(request.getProtocol().getVersion().getBytes());
HeaderUtils.writeCRLF(getOutboundStream());
}
|
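For context, the added lines make the connector write a complete HTTP request line of the form `METHOD SP request-URI SP name/version CRLF` (for example `GET /users/42 HTTP/1.1`); before the patch only the version was written. Below is a minimal standalone sketch of that framing using only the JDK — the `writeRequestLine` helper is hypothetical and is not Restlet code.

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

public class RequestLineDemo {
    // Hypothetical helper mirroring the patched writeMessageHeadLine: method,
    // URI, then "<protocol-name>/<version>", separated by spaces, ending in CRLF.
    static void writeRequestLine(OutputStream out, String method, String uri,
                                 String protocolName, String version) throws IOException {
        out.write(method.getBytes(StandardCharsets.US_ASCII));
        out.write(' ');
        out.write(uri.getBytes(StandardCharsets.US_ASCII));
        out.write(' ');
        out.write(protocolName.getBytes(StandardCharsets.US_ASCII));
        out.write('/');
        out.write(version.getBytes(StandardCharsets.US_ASCII));
        out.write('\r');
        out.write('\n');
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        writeRequestLine(buf, "GET", "/users/42", "HTTP", "1.1");
        System.out.print(buf.toString("US-ASCII")); // GET /users/42 HTTP/1.1
    }
}
```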
4b4d4fe741e9396560b186427a15faf803d423de
|
ReactiveX-RxJava
|
subscribe methods with typed Action arguments
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/observables/Observable.java b/rxjava-core/src/main/java/rx/observables/Observable.java
index 7e0e50b35c..a877081949 100644
--- a/rxjava-core/src/main/java/rx/observables/Observable.java
+++ b/rxjava-core/src/main/java/rx/observables/Observable.java
@@ -46,6 +46,8 @@
import rx.observables.operations.OperationToObservableList;
import rx.observables.operations.OperationToObservableSortedList;
import rx.observables.operations.OperationZip;
+import rx.util.Action0;
+import rx.util.Action1;
import rx.util.Func1;
import rx.util.Func2;
import rx.util.Func3;
@@ -133,6 +135,7 @@ public Subscription subscribe(final Object o) {
// in case a dynamic language is not correctly handling the overloaded methods and we receive an Observer just forward to the correct method.
return subscribe((Observer) o);
}
+
return subscribe(new Observer() {
public void onCompleted() {
@@ -154,6 +157,29 @@ public void onNext(Object args) {
});
}
+ public Subscription subscribe(final Action1<T> onNext) {
+
+ return subscribe(new Observer<T>() {
+
+ public void onCompleted() {
+ // do nothing
+ }
+
+ public void onError(Exception e) {
+ handleError(e);
+ // no callback defined
+ }
+
+ public void onNext(T args) {
+ if (onNext == null) {
+ throw new RuntimeException("onNext must be implemented");
+ }
+ onNext.call(args);
+ }
+
+ });
+ }
+
@SuppressWarnings({ "rawtypes", "unchecked" })
public Subscription subscribe(final Object onNext, final Object onError) {
return subscribe(new Observer() {
@@ -179,6 +205,31 @@ public void onNext(Object args) {
});
}
+ public Subscription subscribe(final Action1<T> onNext, final Action1<Exception> onError) {
+
+ return subscribe(new Observer<T>() {
+
+ public void onCompleted() {
+ // do nothing
+ }
+
+ public void onError(Exception e) {
+ handleError(e);
+ if (onError != null) {
+ onError.call(e);
+ }
+ }
+
+ public void onNext(T args) {
+ if (onNext == null) {
+ throw new RuntimeException("onNext must be implemented");
+ }
+ onNext.call(args);
+ }
+
+ });
+ }
+
@SuppressWarnings({ "rawtypes", "unchecked" })
public Subscription subscribe(final Object onNext, final Object onError, final Object onComplete) {
return subscribe(new Observer() {
@@ -206,6 +257,31 @@ public void onNext(Object args) {
});
}
+ public Subscription subscribe(final Action1<T> onNext, final Action1<Exception> onError, final Action0 onComplete) {
+
+ return subscribe(new Observer<T>() {
+
+ public void onCompleted() {
+ onComplete.call();
+ }
+
+ public void onError(Exception e) {
+ handleError(e);
+ if (onError != null) {
+ onError.call(e);
+ }
+ }
+
+ public void onNext(T args) {
+ if (onNext == null) {
+ throw new RuntimeException("onNext must be implemented");
+ }
+ onNext.call(args);
+ }
+
+ });
+ }
+
/**
* When an error occurs in any Observer we will invoke this to allow it to be handled by the global APIObservableErrorHandler
*
|
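For reference, the overloads added above let callers pass typed callbacks instead of implementing a full `Observer`. A hedged usage sketch follows; the `Observable<String>` instance named `strings` is assumed to come from elsewhere, since Observable creation is not part of this diff.

```java
import rx.observables.Observable;
import rx.util.Action0;
import rx.util.Action1;

public class SubscribeWithActionsDemo {
    static void demo(Observable<String> strings) {
        // onNext only: per the diff, errors are routed to handleError() and completion is ignored.
        strings.subscribe(new Action1<String>() {
            public void call(String s) {
                System.out.println("next: " + s);
            }
        });

        // onNext + onError + onCompleted, matching the three-argument overload above.
        strings.subscribe(
                new Action1<String>() {
                    public void call(String s) {
                        System.out.println("next: " + s);
                    }
                },
                new Action1<Exception>() {
                    public void call(Exception e) {
                        e.printStackTrace();
                    }
                },
                new Action0() {
                    public void call() {
                        System.out.println("done");
                    }
                });
    }
}
```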
00db7d150b22031a0c030d55f1395e0ba41c1c76
|
kotlin
|
Fix KT-10472: compare all overloads including varargs in a single pass.
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/results/OverloadingConflictResolver.kt b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/results/OverloadingConflictResolver.kt
index 312f8af24f805..70972eb033aa8 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/results/OverloadingConflictResolver.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/calls/results/OverloadingConflictResolver.kt
@@ -88,17 +88,9 @@ class OverloadingConflictResolver(private val builtIns: KotlinBuiltIns) {
CandidateCallWithArgumentMapping.create(candidateCall) { it.arguments.filter { it.getArgumentExpression() != null } }
}
- val (varargCandidates, regularCandidates) = conflictingCandidates.partition { it.resultingDescriptor.hasVarargs }
- val mostSpecificRegularCandidates = regularCandidates.selectMostSpecificCallsWithArgumentMapping(discriminateGenericDescriptors)
-
- return when {
- mostSpecificRegularCandidates.size > 1 ->
- null
- mostSpecificRegularCandidates.size == 1 ->
- mostSpecificRegularCandidates.single()
- else ->
- varargCandidates.selectMostSpecificCallsWithArgumentMapping(discriminateGenericDescriptors).singleOrNull()
- }
+ val mostSpecificCandidates = conflictingCandidates.selectMostSpecificCallsWithArgumentMapping(discriminateGenericDescriptors)
+
+ return mostSpecificCandidates.singleOrNull()
}
private fun <D : CallableDescriptor, K> Collection<CandidateCallWithArgumentMapping<D, K>>.selectMostSpecificCallsWithArgumentMapping(
@@ -139,7 +131,7 @@ class OverloadingConflictResolver(private val builtIns: KotlinBuiltIns) {
/**
* Returns `true` if `d1` is definitely not less specific than `d2`,
- * `false` if `d1` is definitely less specific than `d2` or undecided.
+ * `false` otherwise.
*/
private fun <D : CallableDescriptor, K> compareCallsWithArgumentMapping(
call1: CandidateCallWithArgumentMapping<D, K>,
@@ -160,6 +152,11 @@ class OverloadingConflictResolver(private val builtIns: KotlinBuiltIns) {
return it
}
+ val hasVarargs1 = call1.resultingDescriptor.hasVarargs
+ val hasVarargs2 = call2.resultingDescriptor.hasVarargs
+ if (hasVarargs1 && !hasVarargs2) return false
+ if (!hasVarargs1 && hasVarargs2) return true
+
assert(call1.argumentsCount == call2.argumentsCount) {
"$call1 and $call2 have different number of explicit arguments"
}
diff --git a/compiler/testData/diagnostics/tests/resolve/overloadConflicts/kt10472.kt b/compiler/testData/diagnostics/tests/resolve/overloadConflicts/kt10472.kt
new file mode 100644
index 0000000000000..62c58aaf5885f
--- /dev/null
+++ b/compiler/testData/diagnostics/tests/resolve/overloadConflicts/kt10472.kt
@@ -0,0 +1,13 @@
+// !DIAGNOSTICS: -UNUSED_PARAMETER
+
+object Right
+object Wrong
+
+interface A<T>
+interface B<T> : A<T>
+
+fun <T> foo(vararg t: T) = Wrong
+fun <T> foo(t: A<T>) = Wrong
+fun <T> foo(t: B<T>) = Right
+
+fun test(b: B<Int>): Right = foo(b)
diff --git a/compiler/testData/diagnostics/tests/resolve/overloadConflicts/kt10472.txt b/compiler/testData/diagnostics/tests/resolve/overloadConflicts/kt10472.txt
new file mode 100644
index 0000000000000..34b5389048f94
--- /dev/null
+++ b/compiler/testData/diagnostics/tests/resolve/overloadConflicts/kt10472.txt
@@ -0,0 +1,32 @@
+package
+
+public fun </*0*/ T> foo(/*0*/ t: A<T>): Wrong
+public fun </*0*/ T> foo(/*0*/ t: B<T>): Right
+public fun </*0*/ T> foo(/*0*/ vararg t: T /*kotlin.Array<out T>*/): Wrong
+public fun test(/*0*/ b: B<kotlin.Int>): Right
+
+public interface A</*0*/ T> {
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+public interface B</*0*/ T> : A<T> {
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+public object Right {
+ private constructor Right()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
+
+public object Wrong {
+ private constructor Wrong()
+ public open override /*1*/ /*fake_override*/ fun equals(/*0*/ other: kotlin.Any?): kotlin.Boolean
+ public open override /*1*/ /*fake_override*/ fun hashCode(): kotlin.Int
+ public open override /*1*/ /*fake_override*/ fun toString(): kotlin.String
+}
diff --git a/compiler/tests/org/jetbrains/kotlin/checkers/DiagnosticsTestGenerated.java b/compiler/tests/org/jetbrains/kotlin/checkers/DiagnosticsTestGenerated.java
index 3e6e225e8d22e..c04cf4841aec1 100644
--- a/compiler/tests/org/jetbrains/kotlin/checkers/DiagnosticsTestGenerated.java
+++ b/compiler/tests/org/jetbrains/kotlin/checkers/DiagnosticsTestGenerated.java
@@ -13742,6 +13742,12 @@ public void testJavaOverloadedVarargs() throws Exception {
doTest(fileName);
}
+ @TestMetadata("kt10472.kt")
+ public void testKt10472() throws Exception {
+ String fileName = KotlinTestUtils.navigationMetadata("compiler/testData/diagnostics/tests/resolve/overloadConflicts/kt10472.kt");
+ doTest(fileName);
+ }
+
@TestMetadata("numberOfDefaults.kt")
public void testNumberOfDefaults() throws Exception {
String fileName = KotlinTestUtils.navigationMetadata("compiler/testData/diagnostics/tests/resolve/overloadConflicts/numberOfDefaults.kt");
|
272895e16604dc7827da484be057cdfbe988bc7c
|
restlet-framework-java
|
Removed unnecessary instruction.
|
p
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet/src/org/restlet/engine/http/connector/ServerConnection.java b/modules/org.restlet/src/org/restlet/engine/http/connector/ServerConnection.java
index 9e567dc11a..3aeb3b7988 100644
--- a/modules/org.restlet/src/org/restlet/engine/http/connector/ServerConnection.java
+++ b/modules/org.restlet/src/org/restlet/engine/http/connector/ServerConnection.java
@@ -231,7 +231,6 @@ requestUri, version, headers, createInboundEntity(headers),
// Update the response
response.getServerInfo().setAddress(
getHelper().getHelped().getAddress());
- response.getServerInfo().setAgent(Engine.VERSION_HEADER);
response.getServerInfo().setPort(getHelper().getHelped().getPort());
if (request != null) {
|
fdc7205adae52d9e2d928d06faacf9cc9f216b55
|
camel
|
CAMEL-4176: Fixed fallback to use http4 or https4 for proxy scheme when configured as property on CamelContext properties.

git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1144310 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/camel
|
diff --git a/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpComponent.java b/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpComponent.java
index f6431545bdbd1..d90a257eddac2 100644
--- a/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpComponent.java
+++ b/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpComponent.java
@@ -22,6 +22,7 @@
import org.apache.camel.Endpoint;
import org.apache.camel.ResolveEndpointFailedException;
+import org.apache.camel.component.http4.helper.HttpHelper;
import org.apache.camel.impl.HeaderFilterStrategyComponent;
import org.apache.camel.util.CastUtils;
import org.apache.camel.util.IntrospectionSupport;
@@ -182,7 +183,7 @@ protected Endpoint createEndpoint(String uri, String remaining, Map<String, Obje
sslContextParameters = this.sslContextParameters;
}
- boolean secure = isSecureConnection(uri);
+ boolean secure = HttpHelper.isSecureConnection(uri);
// create the configurer to use for this endpoint
HttpClientConfigurer configurer = createHttpClientConfigurer(parameters, secure);
@@ -301,10 +302,6 @@ protected HttpParams configureHttpParams(Map<String, Object> parameters) throws
return clientParams;
}
- private boolean isSecureConnection(String uri) {
- return uri.startsWith("https");
- }
-
@Override
protected boolean useIntrospectionOnEndpoint() {
return false;
diff --git a/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpEndpoint.java b/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpEndpoint.java
index 4ba78fba70a5b..c0a87d5da7e98 100644
--- a/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpEndpoint.java
+++ b/components/camel-http4/src/main/java/org/apache/camel/component/http4/HttpEndpoint.java
@@ -21,6 +21,7 @@
import org.apache.camel.PollingConsumer;
import org.apache.camel.Producer;
+import org.apache.camel.component.http4.helper.HttpHelper;
import org.apache.camel.impl.DefaultPollingEndpoint;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.spi.HeaderFilterStrategyAware;
@@ -117,6 +118,10 @@ protected HttpClient createHttpClient() {
String host = getCamelContext().getProperties().get("http.proxyHost");
int port = Integer.parseInt(getCamelContext().getProperties().get("http.proxyPort"));
String scheme = getCamelContext().getProperties().get("http.proxyScheme");
+ // fallback and use either http4 or https4 depending on secure
+ if (scheme == null) {
+ scheme = HttpHelper.isSecureConnection(getEndpointUri()) ? "https4" : "http4";
+ }
LOG.debug("CamelContext properties http.proxyHost, http.proxyPort, and http.proxyScheme detected. Using http proxy host: {} port: {} scheme: {}", new Object[]{host, port, scheme});
HttpHost proxy = new HttpHost(host, port, scheme);
diff --git a/components/camel-http4/src/main/java/org/apache/camel/component/http4/helper/HttpHelper.java b/components/camel-http4/src/main/java/org/apache/camel/component/http4/helper/HttpHelper.java
index 0da9eca33d46b..775d25a36b9da 100644
--- a/components/camel-http4/src/main/java/org/apache/camel/component/http4/helper/HttpHelper.java
+++ b/components/camel-http4/src/main/java/org/apache/camel/component/http4/helper/HttpHelper.java
@@ -265,6 +265,10 @@ public static HttpVersion parserHttpVersion(String s) throws ProtocolException {
throw new ProtocolException("Invalid HTTP minor version number: " + s);
}
return new HttpVersion(major, minor);
+ }
+ public static boolean isSecureConnection(String uri) {
+ return uri.startsWith("https");
}
+
}
|
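The fallback above only matters when the proxy is configured through CamelContext properties without an explicit `http.proxyScheme`. A hedged configuration sketch follows; the proxy host and port are made up for illustration.

```java
import org.apache.camel.CamelContext;
import org.apache.camel.impl.DefaultCamelContext;

public class Http4ProxyFallbackDemo {
    public static void main(String[] args) throws Exception {
        CamelContext context = new DefaultCamelContext();

        // Proxy settings read by HttpEndpoint.createHttpClient() in the diff above.
        context.getProperties().put("http.proxyHost", "proxy.example.com"); // made-up host
        context.getProperties().put("http.proxyPort", "3128");              // made-up port
        // "http.proxyScheme" is deliberately left unset: with the fix, the endpoint
        // falls back to "https4" for secure endpoint URIs and "http4" otherwise.

        context.start();
        // ... routes using http4:// or https4:// endpoints would go here ...
        context.stop();
    }
}
```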
f16e39ecb1dcb4d5964235ef94d84ab4d70ac314
|
hadoop
|
Merge -c 1529538 from trunk to branch-2 to fix YARN-1090. Fixed CS UI to better reflect applications as non-schedulable and not as pending. Contributed by Jian He.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1529539 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 6a997b42ecf8d..4babceb873418 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -166,6 +166,9 @@ Release 2.1.2 - UNRELEASED
YARN-1032. Fixed NPE in RackResolver. (Lohit Vijayarenu via acmurthy)
+ YARN-1090. Fixed CS UI to better reflect applications as non-schedulable
+ and not as pending. (Jian He via acmurthy)
+
Release 2.1.1-beta - 2013-09-23
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/QueueMetrics.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/QueueMetrics.java
index 9d2c739e480cf..8a030952504fb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/QueueMetrics.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/QueueMetrics.java
@@ -73,7 +73,7 @@ public class QueueMetrics implements MetricsSource {
@Metric("Reserved CPU in virtual cores") MutableGaugeInt reservedVCores;
@Metric("# of reserved containers") MutableGaugeInt reservedContainers;
@Metric("# of active users") MutableGaugeInt activeUsers;
- @Metric("# of active users") MutableGaugeInt activeApplications;
+ @Metric("# of active applications") MutableGaugeInt activeApplications;
private final MutableGaugeInt[] runningTime;
private TimeBucketMetrics<ApplicationId> runBuckets;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
index 0bf851722e218..900c1a62ddade 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
@@ -98,24 +98,25 @@ protected void render(Block html) {
for (UserInfo entry: users) {
activeUserList.append(entry.getUsername()).append(" <")
.append(getPercentage(entry.getResourcesUsed(), usedResources))
- .append(", Active Apps: " + entry.getNumActiveApplications())
- .append(", Pending Apps: " + entry.getNumPendingApplications())
+ .append(", Schedulable Apps: " + entry.getNumActiveApplications())
+ .append(", Non-Schedulable Apps: " + entry.getNumPendingApplications())
.append("><br style='display:block'>"); //Force line break
}
ResponseInfo ri = info("\'" + lqinfo.getQueuePath().substring(5) + "\' Queue Status").
_("Queue State:", lqinfo.getQueueState()).
_("Used Capacity:", percent(lqinfo.getUsedCapacity() / 100)).
+ _("Absolute Used Capacity:", percent(lqinfo.getAbsoluteUsedCapacity() / 100)).
_("Absolute Capacity:", percent(lqinfo.getAbsoluteCapacity() / 100)).
_("Absolute Max Capacity:", percent(lqinfo.getAbsoluteMaxCapacity() / 100)).
_("Used Resources:", StringEscapeUtils.escapeHtml(lqinfo.getUsedResources().toString())).
- _("Num Active Applications:", Integer.toString(lqinfo.getNumActiveApplications())).
- _("Num Pending Applications:", Integer.toString(lqinfo.getNumPendingApplications())).
+ _("Num Schedulable Applications:", Integer.toString(lqinfo.getNumActiveApplications())).
+ _("Num Non-Schedulable Applications:", Integer.toString(lqinfo.getNumPendingApplications())).
_("Num Containers:", Integer.toString(lqinfo.getNumContainers())).
_("Max Applications:", Integer.toString(lqinfo.getMaxApplications())).
_("Max Applications Per User:", Integer.toString(lqinfo.getMaxApplicationsPerUser())).
- _("Max Active Applications:", Integer.toString(lqinfo.getMaxActiveApplications())).
- _("Max Active Applications Per User:", Integer.toString(lqinfo.getMaxActiveApplicationsPerUser())).
+ _("Max Schedulable Applications:", Integer.toString(lqinfo.getMaxActiveApplications())).
+ _("Max Schedulable Applications Per User:", Integer.toString(lqinfo.getMaxActiveApplicationsPerUser())).
_("Configured Capacity:", percent(lqinfo.getCapacity() / 100)).
_("Configured Max Capacity:", percent(lqinfo.getMaxCapacity() / 100)).
_("Configured Minimum User Limit Percent:", Integer.toString(lqinfo.getUserLimit()) + "%").
|
3161a555e1e898786041b84b98f0c45630b7c78e
|
hbase
|
HBASE-3070 Add to hbaseadmin means of shutting down a regionserver

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1003702 13f79535-47bb-0310-9956-ffa450edef68
|
a
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index d5099dc0aa04..99088c3ae308 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -965,6 +965,7 @@ Release 0.21.0 - Unreleased
HBASE-3066 We don't put the port for hregionserver up into znode since
new master
HBASE-2825 Scans respect row locks
+ HBASE-3070 Add to hbaseadmin means of shutting down a regionserver
NEW FEATURES
HBASE-1961 HBase EC2 scripts
diff --git a/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 279ee6ba3084..c2ba277fff69 100644
--- a/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -1006,6 +1006,16 @@ public synchronized void stopMaster() throws IOException {
}
}
+ /**
+ * Stop the designated regionserver.
+ * @throws IOException if a remote or network exception occurs
+ */
+ public synchronized void stopRegionServer(final HServerAddress hsa)
+ throws IOException {
+ HRegionInterface rs = this.connection.getHRegionConnection(hsa);
+ rs.stop("Called by admin client " + this.connection.toString());
+ }
+
/**
* @return cluster status
* @throws IOException if a remote or network exception occurs
diff --git a/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java b/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
index fbdec0b375ca..dc3f68c90e0c 100644
--- a/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
+++ b/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
@@ -64,7 +64,6 @@
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hbase.zookeeper.RootRegionTracker;
import org.apache.hadoop.hbase.zookeeper.ZKTableDisable;
-import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.zookeeper.KeeperException;
@@ -271,6 +270,12 @@ public HConnectionImplementation(Configuration conf)
this.masterChecked = false;
}
+ @Override
+ public String toString() {
+ // Return our zk identifier ... it 'hconnection + zk sessionid'.
+ return this.zooKeeper.toString();
+ }
+
private long getPauseTime(int tries) {
int ntries = tries;
if (ntries >= HConstants.RETRY_BACKOFF.length) {
|
efa5448aa3dd8afae7d1c5f3eb8798befb247e41
|
hbase
|
HBASE-3429 HBaseObjectWritable should support arrays of any Writable or Serializable, not just Writable[]

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1056548 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 1669735cb1d8..9843b6c1d928 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -46,6 +46,8 @@ Release 0.91.0 - Unreleased
(Jesse Yates via Stack)
HBASE-3393 Update Avro gateway to use Avro 1.4.1 and the new
server.join() method (Jeff Hammerbacher via Stack)
+ HBASE-3429 HBaseObjectWritable should support arrays of any Writable
+ or Serializable, not just Writable[] (Ed Kohlwey via Stack)
NEW FEATURES
diff --git a/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java b/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
index ba9db37d8718..5b6bf2db526e 100644
--- a/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
+++ b/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
@@ -84,6 +84,7 @@
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Classes;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;
@@ -382,7 +383,9 @@ public static void writeObject(DataOutput out, Object instance,
declClass = Writable.class;
}
writeClassCode(out, declClass);
- if (declClass.isArray()) { // array
+ // used to just be arrays. Now check for ones for which there's a
+ // class code, and let the others get scooped up by serializable.
+ if (declClass.isArray() && CLASS_TO_CODE.get(declClass)!=null) { // array
// If bytearray, just dump it out -- avoid the recursion and
// byte-at-a-time we were previously doing.
if (declClass.equals(byte [].class)) {
@@ -449,18 +452,27 @@ public static void writeObject(DataOutput out, Object instance,
} else {
writeClassCode(out, c);
}
- ByteArrayOutputStream bos = null;
- ObjectOutputStream oos = null;
- try{
- bos = new ByteArrayOutputStream();
- oos = new ObjectOutputStream(bos);
- oos.writeObject(instanceObj);
- byte[] value = bos.toByteArray();
- out.writeInt(value.length);
- out.write(value);
- } finally {
- if(bos!=null) bos.close();
- if(oos!=null) oos.close();
+ if(declClass.isArray()){
+ int length = Array.getLength(instanceObj);
+ out.writeInt(length);
+ for (int i = 0; i < length; i++) {
+ writeObject(out, Array.get(instanceObj, i),
+ declClass.getComponentType(), conf);
+ }
+ } else {
+ ByteArrayOutputStream bos = null;
+ ObjectOutputStream oos = null;
+ try{
+ bos = new ByteArrayOutputStream();
+ oos = new ObjectOutputStream(bos);
+ oos.writeObject(instanceObj);
+ byte[] value = bos.toByteArray();
+ out.writeInt(value.length);
+ out.write(value);
+ } finally {
+ if(bos!=null) bos.close();
+ if(oos!=null) oos.close();
+ }
}
} else {
throw new IOException("Can't write: "+instanceObj+" as "+declClass);
@@ -569,21 +581,29 @@ public static Object readObject(DataInput in,
instance = null;
}
} else {
- int length = in.readInt();
- byte[] objectBytes = new byte[length];
- in.readFully(objectBytes);
- ByteArrayInputStream bis = null;
- ObjectInputStream ois = null;
- try {
- bis = new ByteArrayInputStream(objectBytes);
- ois = new ObjectInputStream(bis);
- instance = ois.readObject();
- } catch (ClassNotFoundException e) {
- LOG.error("Error in readFields", e);
- throw new IOException("Error in readFields", e);
- } finally {
- if(bis!=null) bis.close();
- if(ois!=null) ois.close();
+ if(instanceClass.isArray()){
+ int length = in.readInt();
+ instance = Array.newInstance(instanceClass.getComponentType(), length);
+ for(int i = 0; i< length; i++){
+ Array.set(instance, i, HbaseObjectWritable.readObject(in, conf));
+ }
+ } else {
+ int length = in.readInt();
+ byte[] objectBytes = new byte[length];
+ in.readFully(objectBytes);
+ ByteArrayInputStream bis = null;
+ ObjectInputStream ois = null;
+ try {
+ bis = new ByteArrayInputStream(objectBytes);
+ ois = new ObjectInputStream(bis);
+ instance = ois.readObject();
+ } catch (ClassNotFoundException e) {
+ LOG.error("Error in readFields", e);
+ throw new IOException("Error in readFields", e);
+ } finally {
+ if(bis!=null) bis.close();
+ if(ois!=null) ois.close();
+ }
}
}
}
diff --git a/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java b/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
index 6bce5cd3e94f..2f0fa7dbad1f 100644
--- a/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
+++ b/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
@@ -21,6 +21,7 @@
import java.io.*;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import junit.framework.TestCase;
@@ -122,6 +123,20 @@ public void testCustomWritable() throws Exception {
assertEquals("mykey", ((CustomFilter)child).getKey());
}
+ public void testCustomWritableArray() throws Exception {
+ Configuration conf = HBaseConfiguration.create();
+
+ // test proper serialization of un-encoded custom writables
+ CustomWritable custom1 = new CustomWritable("test phrase");
+ CustomWritable custom2 = new CustomWritable("test phrase2");
+ CustomWritable[] customs = {custom1, custom2};
+ Object obj = doType(conf, customs, CustomWritable[].class);
+
+ assertTrue("Arrays should match " + Arrays.toString(customs) + ", "
+ + Arrays.toString((Object[]) obj),
+ Arrays.equals(customs, (Object[])obj));
+ }
+
public void testCustomSerializable() throws Exception {
Configuration conf = HBaseConfiguration.create();
@@ -132,6 +147,20 @@ public void testCustomSerializable() throws Exception {
assertTrue(obj instanceof CustomSerializable);
assertEquals("test phrase", ((CustomSerializable)obj).getValue());
}
+
+
+ public void testCustomSerializableArray() throws Exception {
+ Configuration conf = HBaseConfiguration.create();
+
+ // test proper serialization of un-encoded serialized java objects
+ CustomSerializable custom1 = new CustomSerializable("test phrase");
+ CustomSerializable custom2 = new CustomSerializable("test phrase2");
+ CustomSerializable[] custom = {custom1, custom2};
+ Object obj = doType(conf, custom, CustomSerializable[].class);
+ assertTrue("Arrays should match " + Arrays.toString(custom) + ", "
+ + Arrays.toString((Object[]) obj),
+ Arrays.equals(custom, (Object[]) obj));
+ }
private Object doType(final Configuration conf, final Object value,
final Class<?> clazz)
@@ -149,7 +178,7 @@ private Object doType(final Configuration conf, final Object value,
}
public static class CustomSerializable implements Serializable {
- private static final long serialVersionUID = 1048445561865740632L;
+ private static final long serialVersionUID = 1048445561865740633L;
private String value = null;
public CustomSerializable() {
@@ -167,6 +196,21 @@ public void setValue(String value) {
this.value = value;
}
+ @Override
+ public boolean equals(Object obj) {
+ return (obj instanceof CustomSerializable) && ((CustomSerializable)obj).value.equals(value);
+ }
+
+ @Override
+ public int hashCode() {
+ return value.hashCode();
+ }
+
+ @Override
+ public String toString() {
+ return "<" + value + ">";
+ }
+
}
public static class CustomWritable implements Writable {
@@ -190,6 +234,21 @@ public void write(DataOutput out) throws IOException {
public void readFields(DataInput in) throws IOException {
this.value = Text.readString(in);
}
+
+ @Override
+ public boolean equals(Object obj) {
+ return (obj instanceof CustomWritable) && ((CustomWritable)obj).value.equals(value);
+ }
+
+ @Override
+ public int hashCode() {
+ return value.hashCode();
+ }
+
+ @Override
+ public String toString() {
+ return "<" + value + ">";
+ }
}
public static class CustomFilter extends FilterBase {
|
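The core of the change above is length-prefixed, element-by-element array serialization instead of one Java-serialized blob. Below is a minimal generic sketch of that framing using only `java.io`; it is an illustration of the pattern, not HBase's HbaseObjectWritable.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;

public class LengthPrefixedArrayDemo {
    // Write the array length, then each element, mirroring the loop in the patch.
    static void writeStrings(DataOutputStream out, String[] values) throws IOException {
        out.writeInt(values.length);
        for (String v : values) {
            out.writeUTF(v);
        }
    }

    // Read the length, allocate, then read each element back in order.
    static String[] readStrings(DataInputStream in) throws IOException {
        int length = in.readInt();
        String[] values = new String[length];
        for (int i = 0; i < length; i++) {
            values[i] = in.readUTF();
        }
        return values;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        writeStrings(new DataOutputStream(buf), new String[] {"test phrase", "test phrase2"});
        String[] back = readStrings(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        System.out.println(Arrays.toString(back)); // [test phrase, test phrase2]
    }
}
```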
7972ae764b8aef1c2bc2cd07d4cd4f3b14a4aef2
|
hbase
|
HBASE-12137 Alter table add cf doesn't do compression test (Virag Kothari)
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index eef0a09f3d0e..2a7120d5b255 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -1376,18 +1376,19 @@ public void truncateTable(TableName tableName, boolean preserveSplits) throws IO
}
@Override
- public void addColumn(final TableName tableName, final HColumnDescriptor column)
+ public void addColumn(final TableName tableName, final HColumnDescriptor columnDescriptor)
throws IOException {
checkInitialized();
+ checkCompression(columnDescriptor);
if (cpHost != null) {
- if (cpHost.preAddColumn(tableName, column)) {
+ if (cpHost.preAddColumn(tableName, columnDescriptor)) {
return;
}
}
//TODO: we should process this (and some others) in an executor
- new TableAddFamilyHandler(tableName, column, this, this).prepare().process();
+ new TableAddFamilyHandler(tableName, columnDescriptor, this, this).prepare().process();
if (cpHost != null) {
- cpHost.postAddColumn(tableName, column);
+ cpHost.postAddColumn(tableName, columnDescriptor);
}
}
|
9f374fea84e7e7072292fbd2c48da0b8c1ab7a4d
|
intellij-community
|
reorder instructions, maybe it'll fix strange IMSE
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/dom/impl/src/com/intellij/util/xml/impl/DomInvocationHandler.java b/dom/impl/src/com/intellij/util/xml/impl/DomInvocationHandler.java
index 228e3254d03b5..72d67021db388 100644
--- a/dom/impl/src/com/intellij/util/xml/impl/DomInvocationHandler.java
+++ b/dom/impl/src/com/intellij/util/xml/impl/DomInvocationHandler.java
@@ -384,9 +384,12 @@ public void acceptChildren(DomElementVisitor visitor) {
@NotNull
protected final Converter getScalarConverter(final JavaMethod method) {
+ final Converter converter;
synchronized (myScalarConverters) {
- return myScalarConverters.get(method);
+ converter = myScalarConverters.get(method);
}
+ assert converter != null;
+ return converter;
}
public final T getChildDescription() {
|
dd897c807e495d1dbc76c21e9e944dbe030076b7
|
drools
|
JBRULES-1820 Exception: Input stream is not explicitly closed.

git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@36154 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/drools/commons/jci/compilers/EclipseJavaCompiler.java b/drools-compiler/src/main/java/org/drools/commons/jci/compilers/EclipseJavaCompiler.java
index e3405e92fa6..d445923812e 100644
--- a/drools-compiler/src/main/java/org/drools/commons/jci/compilers/EclipseJavaCompiler.java
+++ b/drools-compiler/src/main/java/org/drools/commons/jci/compilers/EclipseJavaCompiler.java
@@ -236,23 +236,24 @@ private NameEnvironmentAnswer findType( final String pClazzName ) {
}
}
-
- final InputStream is = pClassLoader.getResourceAsStream(resourceName);
- if (is == null) {
- return null;
- }
-
- final byte[] buffer = new byte[8192];
- final ByteArrayOutputStream baos = new ByteArrayOutputStream(buffer.length);
- int count;
+ InputStream is = null;
+ ByteArrayOutputStream baos = null;
try {
- while ((count = is.read(buffer, 0, buffer.length)) > 0) {
- baos.write(buffer, 0, count);
+ is = pClassLoader.getResourceAsStream(resourceName);
+ if (is == null) {
+ return null;
}
- baos.flush();
- final char[] fileName = pClazzName.toCharArray();
- final ClassFileReader classFileReader = new ClassFileReader(baos.toByteArray(), fileName, true);
- return new NameEnvironmentAnswer(classFileReader, null);
+
+ final byte[] buffer = new byte[8192];
+ baos = new ByteArrayOutputStream(buffer.length);
+ int count;
+ while ((count = is.read(buffer, 0, buffer.length)) > 0) {
+ baos.write(buffer, 0, count);
+ }
+ baos.flush();
+ final char[] fileName = pClazzName.toCharArray();
+ final ClassFileReader classFileReader = new ClassFileReader(baos.toByteArray(), fileName, true);
+ return new NameEnvironmentAnswer(classFileReader, null);
} catch ( final IOException e ) {
throw new RuntimeException( "could not read class",
e );
@@ -261,13 +262,17 @@ private NameEnvironmentAnswer findType( final String pClazzName ) {
e );
} finally {
try {
- baos.close();
+ if (baos != null ) {
+ baos.close();
+ }
} catch ( final IOException oe ) {
throw new RuntimeException( "could not close output stream",
oe );
}
try {
- is.close();
+ if ( is != null ) {
+ is.close();
+ }
} catch ( final IOException ie ) {
throw new RuntimeException( "could not close input stream",
ie );
@@ -276,19 +281,29 @@ private NameEnvironmentAnswer findType( final String pClazzName ) {
}
private boolean isPackage( final String pClazzName ) {
-
- final InputStream is = pClassLoader.getResourceAsStream(ClassUtils.convertClassToResourcePath(pClazzName));
- if (is != null) {
- return false;
- }
-
- // FIXME: this should not be tied to the extension
- final String source = pClazzName.replace('.', '/') + ".java";
- if (pReader.isAvailable(source)) {
- return false;
+ InputStream is = null;
+ try {
+ is = pClassLoader.getResourceAsStream(ClassUtils.convertClassToResourcePath(pClazzName));
+ if (is != null) {
+ return false;
+ }
+
+ // FIXME: this should not be tied to the extension
+ final String source = pClazzName.replace('.', '/') + ".java";
+ if (pReader.isAvailable(source)) {
+ return false;
+ }
+
+ return true;
+ } finally {
+ if ( is != null ) {
+ try {
+ is.close();
+ } catch ( IOException e ) {
+ throw new RuntimeException( "Unable to close stream for resource: " + pClazzName );
+ }
+ }
}
-
- return true;
}
public boolean isPackage( char[][] parentPackageName, char[] pPackageName ) {
|
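The patch above closes the streams in `finally` blocks with null checks, which is what pre-Java-7 code has to do. For comparison, here is a hedged sketch of the same resource-safety idea expressed with try-with-resources (Java 7+); it is not the project's actual code, just the equivalent read-resource-fully logic.

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

public class ResourceCloseDemo {
    // Read a classpath resource fully; both streams are closed automatically,
    // even when read() or write() throws.
    static byte[] readResource(ClassLoader loader, String resourceName) throws IOException {
        try (InputStream is = loader.getResourceAsStream(resourceName);
             ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
            if (is == null) {
                return null; // resource not found, same contract as the patched findType()
            }
            byte[] buffer = new byte[8192];
            int count;
            while ((count = is.read(buffer)) > 0) {
                baos.write(buffer, 0, count);
            }
            return baos.toByteArray();
        }
    }
}
```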
64cb8af5792432c71315a95b72cdb48219a170a3
|
restlet-framework-java
|
Fixed issue #649, if needed, add base64 padding characters to encoded md5 hash before decoding. Reported and contributed by Andy Dennie.
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet.test/src/org/restlet/test/engine/HeaderTestCase.java b/modules/org.restlet.test/src/org/restlet/test/engine/HeaderTestCase.java
index 08aec9e758..4b66087dc5 100644
--- a/modules/org.restlet.test/src/org/restlet/test/engine/HeaderTestCase.java
+++ b/modules/org.restlet.test/src/org/restlet/test/engine/HeaderTestCase.java
@@ -41,10 +41,15 @@
import org.restlet.data.Encoding;
import org.restlet.data.MediaType;
import org.restlet.engine.header.EncodingReader;
+import org.restlet.engine.header.Header;
+import org.restlet.engine.header.HeaderConstants;
import org.restlet.engine.header.HeaderReader;
+import org.restlet.engine.header.HeaderUtils;
import org.restlet.engine.header.PreferenceReader;
import org.restlet.engine.header.TokenReader;
+import org.restlet.engine.util.Base64;
import org.restlet.engine.util.DateUtils;
+import org.restlet.representation.Representation;
import org.restlet.test.RestletTestCase;
/**
@@ -99,6 +104,33 @@ public void testAddValues() {
assertEquals(l.size(), 1);
}
+ public void testExtracting() {
+ ArrayList<Header> headers = new ArrayList<Header>();
+ String md5hash = "aaaaaaaaaaaaaaaa";
+ // encodes to "YWFhYWFhYWFhYWFhYWFhYQ==", the "==" at the end is padding
+ String encodedWithPadding = Base64.encode(md5hash.getBytes(), false);
+ String encodedNoPadding = encodedWithPadding.substring(0, 22);
+
+ Header header = new Header(HeaderConstants.HEADER_CONTENT_MD5,
+ encodedWithPadding);
+ headers.add(header);
+
+ // extract Content-MD5 header with padded Base64 encoding, make sure it
+ // decodes to original hash
+ Representation rep = HeaderUtils.extractEntityHeaders(headers, null);
+ assertEquals(rep.getDigest().getAlgorithm(),
+ org.restlet.data.Digest.ALGORITHM_MD5);
+ assertEquals(new String(rep.getDigest().getValue()), md5hash);
+
+ // extract header with UNpadded encoding, make sure it also decodes to
+ // original hash
+ header.setValue(encodedNoPadding);
+ rep = HeaderUtils.extractEntityHeaders(headers, null);
+ assertEquals(rep.getDigest().getAlgorithm(),
+ org.restlet.data.Digest.ALGORITHM_MD5);
+ assertEquals(new String(rep.getDigest().getValue()), md5hash);
+ }
+
public void testInvalidDate() {
final String headerValue = "-1";
final Date date = DateUtils.parse(headerValue,
diff --git a/modules/org.restlet/src/org/restlet/engine/header/HeaderUtils.java b/modules/org.restlet/src/org/restlet/engine/header/HeaderUtils.java
index ebcc35fc30..8016d77aea 100644
--- a/modules/org.restlet/src/org/restlet/engine/header/HeaderUtils.java
+++ b/modules/org.restlet/src/org/restlet/engine/header/HeaderUtils.java
@@ -122,6 +122,7 @@ public class HeaderUtils {
HeaderConstants.HEADER_VARY, HeaderConstants.HEADER_VIA,
HeaderConstants.HEADER_WARNING,
HeaderConstants.HEADER_WWW_AUTHENTICATE)));
+
/**
* Set of unsupported headers that will be covered in future versions.
*/
@@ -631,7 +632,8 @@ public static void copyExtensionHeaders(Series<Header> headers,
// [ifndef gwt] instruction
extensionHeaders = new Series<Header>(Header.class);
// [ifdef gwt] instruction uncomment
- // extensionHeaders = new org.restlet.engine.util.HeaderSeries();
+ // extensionHeaders = new
+ // org.restlet.engine.util.HeaderSeries();
response.getAttributes().put(HeaderConstants.ATTRIBUTE_HEADERS,
extensionHeaders);
}
@@ -852,10 +854,19 @@ public static Representation extractEntityHeaders(Iterable<Header> headers,
} else if (header.getName().equalsIgnoreCase(
HeaderConstants.HEADER_CONTENT_MD5)) {
// [ifndef gwt]
+ // Since an MD5 hash is 128 bits long, its base64 encoding
+ // is 22 bytes if unpadded, or 24 bytes if padded. If the
+ // header value is unpadded, append two base64 padding
+ // characters ("==") before passing the value to
+ // Base64.decode(), which requires its input argument's
+ // length to be a multiple of four.
+ String base64hash = header.getValue();
+ if (base64hash.length() == 22) {
+ base64hash += "==";
+ }
result.setDigest(new org.restlet.data.Digest(
org.restlet.data.Digest.ALGORITHM_MD5,
- org.restlet.engine.util.Base64.decode(header
- .getValue())));
+ org.restlet.engine.util.Base64.decode(base64hash)));
entityHeaderFound = true;
// [enddef]
}
|
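The comment added in the diff rests on a size argument: an MD5 digest is 16 bytes, and base64 maps every 3 bytes to 4 characters, so the encoding is 22 characters unpadded or 24 with the trailing "==". A small standalone check of that arithmetic using the JDK's `java.util.Base64` (not Restlet's Base64 class):

```java
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Base64;

public class Md5Base64PaddingDemo {
    public static void main(String[] args) throws Exception {
        // A 128-bit MD5 digest is 16 bytes.
        byte[] digest = MessageDigest.getInstance("MD5")
                .digest("hello".getBytes(StandardCharsets.UTF_8));

        String padded = Base64.getEncoder().encodeToString(digest);                    // 24 chars, ends with "=="
        String unpadded = Base64.getEncoder().withoutPadding().encodeToString(digest); // 22 chars

        // Mirror of the patch: restore padding when the header value arrived unpadded,
        // so decoders that require a length divisible by four can handle it.
        String base64hash = unpadded;
        if (base64hash.length() == 22) {
            base64hash += "==";
        }

        System.out.println(padded.equals(base64hash));                     // true
        System.out.println(Base64.getDecoder().decode(base64hash).length); // 16
    }
}
```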
ced5a6c917416088d9def359edacf548509677f3
|
kotlin
|
Introduce RenderingContext and add as parameter to DiagnosticParameterRenderer#render.

RenderingContext holds data about the whole diagnostic, allowing the rendering of its parameters to be adjusted.
|
a
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/DefaultErrorMessagesJvm.java b/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/DefaultErrorMessagesJvm.java
index 8b059e3b5a9d7..50cd881fa597a 100644
--- a/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/DefaultErrorMessagesJvm.java
+++ b/compiler/frontend.java/src/org/jetbrains/kotlin/resolve/jvm/diagnostics/DefaultErrorMessagesJvm.java
@@ -18,10 +18,7 @@
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.descriptors.DeclarationDescriptor;
-import org.jetbrains.kotlin.diagnostics.rendering.DefaultErrorMessages;
-import org.jetbrains.kotlin.diagnostics.rendering.DiagnosticFactoryToRendererMap;
-import org.jetbrains.kotlin.diagnostics.rendering.DiagnosticParameterRenderer;
-import org.jetbrains.kotlin.diagnostics.rendering.Renderers;
+import org.jetbrains.kotlin.diagnostics.rendering.*;
import org.jetbrains.kotlin.renderer.DescriptorRenderer;
import java.util.ArrayList;
@@ -33,7 +30,7 @@ public class DefaultErrorMessagesJvm implements DefaultErrorMessages.Extension {
private static final DiagnosticParameterRenderer<ConflictingJvmDeclarationsData> CONFLICTING_JVM_DECLARATIONS_DATA = new DiagnosticParameterRenderer<ConflictingJvmDeclarationsData>() {
@NotNull
@Override
- public String render(@NotNull ConflictingJvmDeclarationsData data) {
+ public String render(@NotNull ConflictingJvmDeclarationsData data, @NotNull RenderingContext context) {
List<String> renderedDescriptors = new ArrayList<String>();
for (JvmDeclarationOrigin origin : data.getSignatureOrigins()) {
DeclarationDescriptor descriptor = origin.getDescriptor();
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/checkers/CheckerTestUtil.java b/compiler/frontend/src/org/jetbrains/kotlin/checkers/CheckerTestUtil.java
index 19d39f1d56d68..81df046e67e25 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/checkers/CheckerTestUtil.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/checkers/CheckerTestUtil.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2015 JetBrains s.r.o.
+ * Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DefaultErrorMessages.java b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DefaultErrorMessages.java
index 32f27ead4afa9..6482a9375f565 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DefaultErrorMessages.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DefaultErrorMessages.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2015 JetBrains s.r.o.
+ * Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -41,6 +41,7 @@
import static org.jetbrains.kotlin.diagnostics.Errors.*;
import static org.jetbrains.kotlin.diagnostics.rendering.Renderers.*;
+import static org.jetbrains.kotlin.diagnostics.rendering.RenderingContext.*;
public class DefaultErrorMessages {
@@ -118,11 +119,13 @@ public static DiagnosticRenderer getRendererForDiagnostic(@NotNull Diagnostic di
@NotNull
@Override
public String[] render(@NotNull TypeMismatchDueToTypeProjectionsData object) {
+ RenderingContext context =
+ of(object.getExpectedType(), object.getExpressionType(), object.getReceiverType(), object.getCallableDescriptor());
return new String[] {
- RENDER_TYPE.render(object.getExpectedType()),
- RENDER_TYPE.render(object.getExpressionType()),
- RENDER_TYPE.render(object.getReceiverType()),
- DescriptorRenderer.FQ_NAMES_IN_TYPES.render(object.getCallableDescriptor())
+ RENDER_TYPE.render(object.getExpectedType(), context),
+ RENDER_TYPE.render(object.getExpressionType(), context),
+ RENDER_TYPE.render(object.getReceiverType(), context),
+ FQ_NAMES_IN_TYPES.render(object.getCallableDescriptor(), context)
};
}
});
@@ -175,7 +178,7 @@ public String[] render(@NotNull TypeMismatchDueToTypeProjectionsData object) {
MAP.put(NAMED_ARGUMENTS_NOT_ALLOWED, "Named arguments are not allowed for {0}", new DiagnosticParameterRenderer<BadNamedArgumentsTarget>() {
@NotNull
@Override
- public String render(@NotNull BadNamedArgumentsTarget target) {
+ public String render(@NotNull BadNamedArgumentsTarget target, @NotNull RenderingContext context) {
switch (target) {
case NON_KOTLIN_FUNCTION:
return "non-Kotlin functions";
@@ -391,7 +394,7 @@ public String render(@NotNull BadNamedArgumentsTarget target) {
MAP.put(EXPRESSION_EXPECTED, "{0} is not an expression, and only expressions are allowed here", new DiagnosticParameterRenderer<KtExpression>() {
@NotNull
@Override
- public String render(@NotNull KtExpression expression) {
+ public String render(@NotNull KtExpression expression, @NotNull RenderingContext context) {
String expressionType = expression.toString();
return expressionType.substring(0, 1) +
expressionType.substring(1).toLowerCase();
@@ -487,7 +490,7 @@ public String render(@NotNull KtExpression expression) {
MAP.put(NAME_IN_CONSTRAINT_IS_NOT_A_TYPE_PARAMETER, "{0} does not refer to a type parameter of {1}", new DiagnosticParameterRenderer<KtTypeConstraint>() {
@NotNull
@Override
- public String render(@NotNull KtTypeConstraint typeConstraint) {
+ public String render(@NotNull KtTypeConstraint typeConstraint, @NotNull RenderingContext context) {
//noinspection ConstantConditions
return typeConstraint.getSubjectTypeParameterName().getReferencedName();
}
@@ -509,11 +512,13 @@ public String render(@NotNull KtTypeConstraint typeConstraint) {
@NotNull
@Override
public String[] render(@NotNull VarianceConflictDiagnosticData data) {
+ RenderingContext context =
+ of(data.getTypeParameter(), data.getTypeParameter().getVariance(), data.getOccurrencePosition(), data.getContainingType());
return new String[] {
- NAME.render(data.getTypeParameter()),
- RENDER_POSITION_VARIANCE.render(data.getTypeParameter().getVariance()),
- RENDER_POSITION_VARIANCE.render(data.getOccurrencePosition()),
- RENDER_TYPE.render(data.getContainingType())
+ NAME.render(data.getTypeParameter(), context),
+ RENDER_POSITION_VARIANCE.render(data.getTypeParameter().getVariance(), context),
+ RENDER_POSITION_VARIANCE.render(data.getOccurrencePosition(), context),
+ RENDER_TYPE.render(data.getContainingType(), context)
};
}
});
@@ -548,7 +553,7 @@ public String[] render(@NotNull VarianceConflictDiagnosticData data) {
MAP.put(EQUALITY_NOT_APPLICABLE, "Operator ''{0}'' cannot be applied to ''{1}'' and ''{2}''", new DiagnosticParameterRenderer<KtSimpleNameExpression>() {
@NotNull
@Override
- public String render(@NotNull KtSimpleNameExpression nameExpression) {
+ public String render(@NotNull KtSimpleNameExpression nameExpression, @NotNull RenderingContext context) {
//noinspection ConstantConditions
return nameExpression.getReferencedName();
}
@@ -603,15 +608,15 @@ public String render(@NotNull KtSimpleNameExpression nameExpression) {
ELEMENT_TEXT, new DiagnosticParameterRenderer<KotlinType>() {
@NotNull
@Override
- public String render(@NotNull KotlinType type) {
+ public String render(@NotNull KotlinType type, @NotNull RenderingContext context) {
if (type.isError()) return "";
- return " of type '" + RENDER_TYPE.render(type) + "'";
+ return " of type '" + RENDER_TYPE.render(type, context) + "'";
}
});
MAP.put(FUNCTION_CALL_EXPECTED, "Function invocation ''{0}({1})'' expected", ELEMENT_TEXT, new DiagnosticParameterRenderer<Boolean>() {
@NotNull
@Override
- public String render(@NotNull Boolean hasValueParameters) {
+ public String render(@NotNull Boolean hasValueParameters, @NotNull RenderingContext context) {
return hasValueParameters ? "..." : "";
}
});
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DiagnosticParameterRenderer.kt b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DiagnosticParameterRenderer.kt
index 5efb23fcc284b..1d445f643ca9e 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DiagnosticParameterRenderer.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DiagnosticParameterRenderer.kt
@@ -17,9 +17,13 @@
package org.jetbrains.kotlin.diagnostics.rendering
interface DiagnosticParameterRenderer<in O> {
- fun render(obj: O): String
+ fun render(obj: O, renderingContext: RenderingContext): String
}
fun <O> Renderer(block: (O) -> String) = object : DiagnosticParameterRenderer<O> {
- override fun render(obj: O) = block(obj)
+ override fun render(obj: O, renderingContext: RenderingContext): String = block(obj)
+}
+
+fun <O> ContextDependentRenderer(block: (O, RenderingContext) -> String) = object : DiagnosticParameterRenderer<O> {
+ override fun render(obj: O, renderingContext: RenderingContext): String = block(obj, renderingContext)
}
\ No newline at end of file
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DiagnosticRendererUtil.kt b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DiagnosticRendererUtil.kt
index 644df659948e5..34a20f8d8ebc0 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DiagnosticRendererUtil.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/DiagnosticRendererUtil.kt
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2015 JetBrains s.r.o.
+ * Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,7 +19,8 @@ package org.jetbrains.kotlin.diagnostics.rendering
import org.jetbrains.kotlin.descriptors.ClassDescriptor
import org.jetbrains.kotlin.renderer.DescriptorRenderer
-fun <P : Any> renderParameter(parameter: P, renderer: DiagnosticParameterRenderer<P>?): Any = renderer?.render(parameter) ?: parameter
+fun <P : Any> renderParameter(parameter: P, renderer: DiagnosticParameterRenderer<P>?, context: RenderingContext): Any
+ = renderer?.render(parameter, context) ?: parameter
fun ClassDescriptor.renderKindWithName(): String = DescriptorRenderer.getClassKindPrefix(this) + " '" + name + "'"
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/Renderers.kt b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/Renderers.kt
index 1703a62e53737..8bfd10ad4e0a1 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/Renderers.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/Renderers.kt
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2015 JetBrains s.r.o.
+ * Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -122,19 +122,20 @@ object Renderers {
@JvmField val AMBIGUOUS_CALLS = Renderer {
calls: Collection<ResolvedCall<*>> ->
- calls
- .map { it.resultingDescriptor }
+ val descriptors = calls.map { it.resultingDescriptor }
+ val context = RenderingContext.Impl(descriptors)
+ descriptors
.sortedWith(MemberComparator.INSTANCE)
- .joinToString(separator = "\n", prefix = "\n") { DescriptorRenderer.FQ_NAMES_IN_TYPES.render(it) }
+ .joinToString(separator = "\n", prefix = "\n") { FQ_NAMES_IN_TYPES.render(it, context) }
}
- @JvmStatic fun <T> commaSeparated(itemRenderer: DiagnosticParameterRenderer<T>) = Renderer<Collection<T>> {
- collection ->
+ @JvmStatic fun <T> commaSeparated(itemRenderer: DiagnosticParameterRenderer<T>) = ContextDependentRenderer<Collection<T>> {
+ collection, context ->
buildString {
val iterator = collection.iterator()
while (iterator.hasNext()) {
val next = iterator.next()
- append(itemRenderer.render(next))
+ append(itemRenderer.render(next, context))
if (iterator.hasNext()) {
append(", ")
}
@@ -166,7 +167,7 @@ object Renderers {
inferenceErrorData: InferenceErrorData, result: TabledDescriptorRenderer
): TabledDescriptorRenderer {
LOG.assertTrue(inferenceErrorData.constraintSystem.status.hasConflictingConstraints(),
- renderDebugMessage("Conflicting substitutions inference error renderer is applied for incorrect status", inferenceErrorData))
+ debugMessage("Conflicting substitutions inference error renderer is applied for incorrect status", inferenceErrorData))
val substitutedDescriptors = Lists.newArrayList<CallableDescriptor>()
val substitutors = ConstraintsUtil.getSubstitutorsForConflictingParameters(inferenceErrorData.constraintSystem)
@@ -177,7 +178,7 @@ object Renderers {
val firstConflictingVariable = ConstraintsUtil.getFirstConflictingVariable(inferenceErrorData.constraintSystem)
if (firstConflictingVariable == null) {
- LOG.error(renderDebugMessage("There is no conflicting parameter for 'conflicting constraints' error.", inferenceErrorData))
+ LOG.error(debugMessage("There is no conflicting parameter for 'conflicting constraints' error.", inferenceErrorData))
return result
}
@@ -238,7 +239,7 @@ object Renderers {
val firstUnknownVariable = inferenceErrorData.constraintSystem.typeVariables.firstOrNull { variable ->
inferenceErrorData.constraintSystem.getTypeBounds(variable).values.isEmpty()
} ?: return result.apply {
- LOG.error(renderDebugMessage("There is no unknown parameter for 'no information for parameter error'.", inferenceErrorData))
+ LOG.error(debugMessage("There is no unknown parameter for 'no information for parameter error'.", inferenceErrorData))
}
return result
@@ -256,7 +257,7 @@ object Renderers {
val constraintSystem = inferenceErrorData.constraintSystem
val status = constraintSystem.status
LOG.assertTrue(status.hasViolatedUpperBound(),
- renderDebugMessage("Upper bound violated renderer is applied for incorrect status", inferenceErrorData))
+ debugMessage("Upper bound violated renderer is applied for incorrect status", inferenceErrorData))
val systemWithoutWeakConstraints = constraintSystem.filterConstraintsOut(ConstraintPositionKind.TYPE_BOUND_POSITION)
val typeParameterDescriptor = inferenceErrorData.descriptor.typeParameters.firstOrNull {
@@ -266,15 +267,15 @@ object Renderers {
return renderConflictingSubstitutionsInferenceError(inferenceErrorData, result)
}
if (typeParameterDescriptor == null) {
- LOG.error(renderDebugMessage("There is no type parameter with violated upper bound for 'upper bound violated' error", inferenceErrorData))
+ LOG.error(debugMessage("There is no type parameter with violated upper bound for 'upper bound violated' error", inferenceErrorData))
return result
}
val typeVariable = systemWithoutWeakConstraints.descriptorToVariable(inferenceErrorData.call.toHandle(), typeParameterDescriptor)
val inferredValueForTypeParameter = systemWithoutWeakConstraints.getTypeBounds(typeVariable).value
if (inferredValueForTypeParameter == null) {
- LOG.error(renderDebugMessage("System without weak constraints is not successful, there is no value for type parameter " +
- typeParameterDescriptor.name + "\n: " + systemWithoutWeakConstraints, inferenceErrorData))
+ LOG.error(debugMessage("System without weak constraints is not successful, there is no value for type parameter " +
+ typeParameterDescriptor.name + "\n: " + systemWithoutWeakConstraints, inferenceErrorData))
return result
}
@@ -295,17 +296,19 @@ object Renderers {
}
}
if (violatedUpperBound == null) {
- LOG.error(renderDebugMessage("Type parameter (chosen as violating its upper bound)" +
- typeParameterDescriptor.name + " violates no bounds after substitution", inferenceErrorData))
+ LOG.error(debugMessage("Type parameter (chosen as violating its upper bound)" +
+ typeParameterDescriptor.name + " violates no bounds after substitution", inferenceErrorData))
return result
}
+ // TODO: context should be in fact shared for the table and these two types
+ val context = RenderingContext.of(inferredValueForTypeParameter, violatedUpperBound)
val typeRenderer = result.typeRenderer
result.text(newText()
.normal(" is not satisfied: inferred type ")
- .error(typeRenderer.render(inferredValueForTypeParameter))
+ .error(typeRenderer.render(inferredValueForTypeParameter, context))
.normal(" is not a subtype of ")
- .strong(typeRenderer.render(violatedUpperBound)))
+ .strong(typeRenderer.render(violatedUpperBound, context)))
return result
}
@@ -316,7 +319,7 @@ object Renderers {
val errors = system.status.constraintErrors
val typeVariableWithCapturedConstraint = errors.firstIsInstanceOrNull<CannotCapture>()?.typeVariable
if (typeVariableWithCapturedConstraint == null) {
- LOG.error(renderDebugMessage("An error 'cannot capture type parameter' is not found in errors", inferenceErrorData))
+ LOG.error(debugMessage("An error 'cannot capture type parameter' is not found in errors", inferenceErrorData))
return result
}
@@ -324,7 +327,7 @@ object Renderers {
val boundWithCapturedType = typeBounds.bounds.firstOrNull { it.constrainingType.isCaptured() }
val capturedTypeConstructor = boundWithCapturedType?.constrainingType?.constructor as? CapturedTypeConstructor
if (capturedTypeConstructor == null) {
- LOG.error(renderDebugMessage("There is no captured type in bounds, but there is an error 'cannot capture type parameter'", inferenceErrorData))
+ LOG.error(debugMessage("There is no captured type in bounds, but there is an error 'cannot capture type parameter'", inferenceErrorData))
return result
}
@@ -333,7 +336,7 @@ object Renderers {
val explanation: String
val upperBound = TypeIntersector.getUpperBoundsAsType(typeParameter)
if (!KotlinBuiltIns.isNullableAny(upperBound) && capturedTypeConstructor.typeProjection.projectionKind == Variance.IN_VARIANCE) {
- explanation = "Type parameter has an upper bound '" + result.typeRenderer.render(upperBound) + "'" +
+ explanation = "Type parameter has an upper bound '" + result.typeRenderer.render(upperBound, RenderingContext.of(upperBound)) + "'" +
" that cannot be satisfied capturing 'in' projection"
}
else {
@@ -365,24 +368,20 @@ object Renderers {
}
}
- private fun renderTypes(types: Collection<KotlinType>) = StringUtil.join(types, { RENDER_TYPE.render(it) }, ", ")
+ private fun renderTypes(types: Collection<KotlinType>, context: RenderingContext) = StringUtil.join(types, { RENDER_TYPE.render(it, context) }, ", ")
- @JvmField val RENDER_COLLECTION_OF_TYPES = Renderer<Collection<KotlinType>> { renderTypes(it) }
+ @JvmField val RENDER_COLLECTION_OF_TYPES = ContextDependentRenderer<Collection<KotlinType>> { types, context -> renderTypes(types, context) }
- private fun renderConstraintSystem(constraintSystem: ConstraintSystem, renderTypeBounds: DiagnosticParameterRenderer<TypeBounds>): String {
+ fun renderConstraintSystem(constraintSystem: ConstraintSystem, shortTypeBounds: Boolean): String {
val typeBounds = linkedSetOf<TypeBounds>()
for (variable in constraintSystem.typeVariables) {
typeBounds.add(constraintSystem.getTypeBounds(variable))
}
return "type parameter bounds:\n" +
- StringUtil.join(typeBounds, { renderTypeBounds.render(it) }, "\n") + "\n\n" + "status:\n" +
+ StringUtil.join(typeBounds, { renderTypeBounds(it, short = shortTypeBounds) }, "\n") + "\n\n" + "status:\n" +
ConstraintsUtil.getDebugMessageForStatus(constraintSystem.status)
}
- @JvmField val RENDER_CONSTRAINT_SYSTEM = Renderer<ConstraintSystem> { renderConstraintSystem(it, RENDER_TYPE_BOUNDS) }
-
- @JvmField val RENDER_CONSTRAINT_SYSTEM_SHORT = Renderer<ConstraintSystem> { renderConstraintSystem(it, RENDER_TYPE_BOUNDS_SHORT) }
-
private fun renderTypeBounds(typeBounds: TypeBounds, short: Boolean): String {
val renderBound = { bound: Bound ->
val arrow = if (bound.kind == LOWER_BOUND) ">: " else if (bound.kind == UPPER_BOUND) "<: " else ":= "
@@ -398,28 +397,25 @@ object Renderers {
"$typeVariableName ${StringUtil.join(typeBounds.bounds, renderBound, ", ")}"
}
- @JvmField val RENDER_TYPE_BOUNDS = Renderer<TypeBounds> { renderTypeBounds(it, short = false) }
-
- @JvmField val RENDER_TYPE_BOUNDS_SHORT = Renderer<TypeBounds> { renderTypeBounds(it, short = true) }
-
- private fun renderDebugMessage(message: String, inferenceErrorData: InferenceErrorData) = buildString {
+ private fun debugMessage(message: String, inferenceErrorData: InferenceErrorData) = buildString {
append(message)
append("\nConstraint system: \n")
- append(RENDER_CONSTRAINT_SYSTEM.render(inferenceErrorData.constraintSystem))
+ append(renderConstraintSystem(inferenceErrorData.constraintSystem, false))
append("\nDescriptor:\n")
append(inferenceErrorData.descriptor)
append("\nExpected type:\n")
+ val context = RenderingContext.Empty
if (TypeUtils.noExpectedType(inferenceErrorData.expectedType)) {
append(inferenceErrorData.expectedType)
}
else {
- append(RENDER_TYPE.render(inferenceErrorData.expectedType))
+ append(RENDER_TYPE.render(inferenceErrorData.expectedType, context))
}
append("\nArgument types:\n")
if (inferenceErrorData.receiverArgumentType != null) {
- append(RENDER_TYPE.render(inferenceErrorData.receiverArgumentType)).append(".")
+ append(RENDER_TYPE.render(inferenceErrorData.receiverArgumentType, context)).append(".")
}
- append("(").append(renderTypes(inferenceErrorData.valueArgumentsTypes)).append(")")
+ append("(").append(renderTypes(inferenceErrorData.valueArgumentsTypes, context)).append(")")
}
private val WHEN_MISSING_LIMIT = 7
@@ -448,4 +444,4 @@ object Renderers {
fun DescriptorRenderer.asRenderer() = Renderer<DeclarationDescriptor> {
render(it)
-}
\ No newline at end of file
+}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/RenderingContext.kt b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/RenderingContext.kt
new file mode 100644
index 0000000000000..0c2c56b5e2587
--- /dev/null
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/RenderingContext.kt
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2010-2016 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.jetbrains.kotlin.diagnostics.rendering
+
+import org.jetbrains.kotlin.diagnostics.*
+
+// holds data about the parameters of the diagnostic we're about to render
+sealed class RenderingContext {
+ abstract operator fun <T> get(key: Key<T>): T
+
+ abstract class Key<T>(val name: String) {
+ abstract fun compute(objectsToRender: Collection<Any?>): T
+ }
+
+ class Impl(private val objectsToRender: Collection<Any?>) : RenderingContext() {
+ private val data = linkedMapOf<Key<*>, Any?>()
+
+ override fun <T> get(key: Key<T>): T {
+ if (!data.containsKey(key)) {
+ val result = key.compute(objectsToRender)
+ data[key] = result
+ return result
+ }
+ return data[key] as T
+ }
+ }
+
+
+ object Empty : RenderingContext() {
+ override fun <T> get(key: Key<T>): T {
+ return key.compute(emptyList())
+ }
+ }
+
+ companion object {
+ @JvmStatic
+ fun of(vararg objectsToRender: Any?): RenderingContext {
+ return Impl(objectsToRender.toList())
+ }
+
+ @JvmStatic
+ fun fromDiagnostic(d: Diagnostic): RenderingContext {
+ val parameters = when (d) {
+ is SimpleDiagnostic<*> -> listOf()
+ is DiagnosticWithParameters1<*, *> -> listOf(d.a)
+ is DiagnosticWithParameters2<*, *, *> -> listOf(d.a, d.b)
+ is DiagnosticWithParameters3<*, *, *, *> -> listOf(d.a, d.b, d.c)
+ is ParametrizedDiagnostic<*> -> error("Unexpected diagnostic: ${d.javaClass}")
+ else -> listOf()
+ }
+ return Impl(parameters)
+ }
+ }
+}
\ No newline at end of file
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/TabledDescriptorRenderer.java b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/TabledDescriptorRenderer.java
index f9677583991b4..05e5c1eb89639 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/TabledDescriptorRenderer.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/TabledDescriptorRenderer.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2015 JetBrains s.r.o.
+ * Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -26,10 +26,10 @@
import org.jetbrains.kotlin.diagnostics.rendering.TabledDescriptorRenderer.TableRenderer.FunctionArgumentsRow;
import org.jetbrains.kotlin.diagnostics.rendering.TabledDescriptorRenderer.TableRenderer.TableRow;
import org.jetbrains.kotlin.diagnostics.rendering.TabledDescriptorRenderer.TextRenderer.TextElement;
-import org.jetbrains.kotlin.renderer.DescriptorRenderer;
import org.jetbrains.kotlin.resolve.calls.inference.constraintPosition.ConstraintPosition;
import org.jetbrains.kotlin.types.KotlinType;
+import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -167,26 +167,33 @@ protected void renderText(TextRenderer textRenderer, StringBuilder result) {
protected void renderTable(TableRenderer table, StringBuilder result) {
if (table.rows.isEmpty()) return;
+
+ RenderingContext context = computeRenderingContext(table);
for (TableRow row : table.rows) {
if (row instanceof TextRenderer) {
renderText((TextRenderer) row, result);
}
if (row instanceof DescriptorRow) {
- result.append(DescriptorRenderer.COMPACT.render(((DescriptorRow) row).descriptor));
+ result.append(Renderers.COMPACT.render(((DescriptorRow) row).descriptor, context));
}
if (row instanceof FunctionArgumentsRow) {
FunctionArgumentsRow functionArgumentsRow = (FunctionArgumentsRow) row;
- renderFunctionArguments(functionArgumentsRow.receiverType, functionArgumentsRow.argumentTypes, result);
+ renderFunctionArguments(functionArgumentsRow.receiverType, functionArgumentsRow.argumentTypes, result, context);
}
result.append("\n");
}
}
- private void renderFunctionArguments(@Nullable KotlinType receiverType, @NotNull List<KotlinType> argumentTypes, StringBuilder result) {
+ private void renderFunctionArguments(
+ @Nullable KotlinType receiverType,
+ @NotNull List<KotlinType> argumentTypes,
+ StringBuilder result,
+ @NotNull RenderingContext context
+ ) {
boolean hasReceiver = receiverType != null;
if (hasReceiver) {
result.append("receiver: ");
- result.append(getTypeRenderer().render(receiverType));
+ result.append(getTypeRenderer().render(receiverType, context));
result.append(" arguments: ");
}
if (argumentTypes.isEmpty()) {
@@ -197,7 +204,7 @@ private void renderFunctionArguments(@Nullable KotlinType receiverType, @NotNull
result.append("(");
for (Iterator<KotlinType> iterator = argumentTypes.iterator(); iterator.hasNext(); ) {
KotlinType argumentType = iterator.next();
- String renderedArgument = getTypeRenderer().render(argumentType);
+ String renderedArgument = getTypeRenderer().render(argumentType, context);
result.append(renderedArgument);
if (iterator.hasNext()) {
@@ -212,4 +219,25 @@ public static TabledDescriptorRenderer create() {
}
public static enum TextElementType { STRONG, ERROR, DEFAULT }
+
+ @NotNull
+ protected static RenderingContext computeRenderingContext(@NotNull TableRenderer table) {
+ ArrayList<Object> toRender = new ArrayList<Object>();
+ for (TableRow row : table.rows) {
+ if (row instanceof DescriptorRow) {
+ toRender.add(((DescriptorRow) row).descriptor);
+ }
+ else if (row instanceof FunctionArgumentsRow) {
+ toRender.add(((FunctionArgumentsRow) row).receiverType);
+ toRender.addAll(((FunctionArgumentsRow) row).argumentTypes);
+ }
+ else if (row instanceof TextRenderer) {
+
+ }
+ else {
+ throw new AssertionError("Unknown row of type " + row.getClass());
+ }
+ }
+ return new RenderingContext.Impl(toRender);
+ }
}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/diagnosticsWithParameterRenderers.kt b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/diagnosticsWithParameterRenderers.kt
index 42b567ed62c05..7aeccb372812d 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/diagnosticsWithParameterRenderers.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/diagnostics/rendering/diagnosticsWithParameterRenderers.kt
@@ -42,10 +42,9 @@ class DiagnosticWithParameters1Renderer<A : Any>(
) : AbstractDiagnosticWithParametersRenderer<DiagnosticWithParameters1<*, A>>(message) {
override fun renderParameters(diagnostic: DiagnosticWithParameters1<*, A>): Array<out Any> {
- return arrayOf(renderParameter(diagnostic.a, rendererForA))
+ val context = RenderingContext.of(diagnostic.a)
+ return arrayOf(renderParameter(diagnostic.a, rendererForA, context))
}
-
-
}
class DiagnosticWithParameters2Renderer<A : Any, B : Any>(
@@ -55,9 +54,10 @@ class DiagnosticWithParameters2Renderer<A : Any, B : Any>(
) : AbstractDiagnosticWithParametersRenderer<DiagnosticWithParameters2<*, A, B>>(message) {
override fun renderParameters(diagnostic: DiagnosticWithParameters2<*, A, B>): Array<out Any> {
+ val context = RenderingContext.of(diagnostic.a, diagnostic.b)
return arrayOf(
- renderParameter(diagnostic.a, rendererForA),
- renderParameter(diagnostic.b, rendererForB)
+ renderParameter(diagnostic.a, rendererForA, context),
+ renderParameter(diagnostic.b, rendererForB, context)
)
}
}
@@ -70,10 +70,11 @@ class DiagnosticWithParameters3Renderer<A : Any, B : Any, C : Any>(
) : AbstractDiagnosticWithParametersRenderer<DiagnosticWithParameters3<*, A, B, C>>(message) {
override fun renderParameters(diagnostic: DiagnosticWithParameters3<*, A, B, C>): Array<out Any> {
+ val context = RenderingContext.of(diagnostic.a, diagnostic.b, diagnostic.c)
return arrayOf(
- renderParameter(diagnostic.a, rendererForA),
- renderParameter(diagnostic.b, rendererForB),
- renderParameter(diagnostic.c, rendererForC)
+ renderParameter(diagnostic.a, rendererForA, context),
+ renderParameter(diagnostic.b, rendererForB, context),
+ renderParameter(diagnostic.c, rendererForC, context)
)
}
}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/DelegatedPropertyResolver.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/DelegatedPropertyResolver.java
index b297cd15d34a0..5e8c2620c996c 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/DelegatedPropertyResolver.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/DelegatedPropertyResolver.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2015 JetBrains s.r.o.
+ * Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,15 +17,17 @@
package org.jetbrains.kotlin.resolve;
import com.google.common.collect.Lists;
+import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
+import com.intellij.util.Function;
import kotlin.Pair;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.kotlin.builtins.KotlinBuiltIns;
import org.jetbrains.kotlin.descriptors.*;
-import org.jetbrains.kotlin.diagnostics.rendering.Renderers;
import org.jetbrains.kotlin.name.Name;
import org.jetbrains.kotlin.psi.*;
+import org.jetbrains.kotlin.renderer.DescriptorRenderer;
import org.jetbrains.kotlin.resolve.calls.callUtil.CallUtilKt;
import org.jetbrains.kotlin.resolve.calls.inference.ConstraintSystem;
import org.jetbrains.kotlin.resolve.calls.inference.ConstraintSystemCompleter;
@@ -34,8 +36,8 @@
import org.jetbrains.kotlin.resolve.calls.results.OverloadResolutionResults;
import org.jetbrains.kotlin.resolve.calls.smartcasts.DataFlowInfo;
import org.jetbrains.kotlin.resolve.calls.smartcasts.DataFlowInfoFactory;
-import org.jetbrains.kotlin.resolve.scopes.ScopeUtils;
import org.jetbrains.kotlin.resolve.scopes.LexicalScope;
+import org.jetbrains.kotlin.resolve.scopes.ScopeUtils;
import org.jetbrains.kotlin.resolve.scopes.receivers.ExpressionReceiver;
import org.jetbrains.kotlin.resolve.validation.OperatorValidator;
import org.jetbrains.kotlin.resolve.validation.SymbolUsageValidator;
@@ -308,7 +310,13 @@ private static String renderCall(@NotNull Call call, @NotNull BindingContext con
argumentTypes.add(context.getType(argument.getArgumentExpression()));
}
- builder.append(Renderers.RENDER_COLLECTION_OF_TYPES.render(argumentTypes));
+ String arguments = StringUtil.join(argumentTypes, new Function<KotlinType, String>() {
+ @Override
+ public String fun(KotlinType type) {
+ return DescriptorRenderer.FQ_NAMES_IN_TYPES.renderType(type);
+ }
+ }, ", ");
+ builder.append(arguments);
builder.append(")");
return builder.toString();
}
diff --git a/compiler/tests/org/jetbrains/kotlin/resolve/constraintSystem/AbstractConstraintSystemTest.kt b/compiler/tests/org/jetbrains/kotlin/resolve/constraintSystem/AbstractConstraintSystemTest.kt
index 53f7dcdac837a..245a6245b83d0 100644
--- a/compiler/tests/org/jetbrains/kotlin/resolve/constraintSystem/AbstractConstraintSystemTest.kt
+++ b/compiler/tests/org/jetbrains/kotlin/resolve/constraintSystem/AbstractConstraintSystemTest.kt
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2015 JetBrains s.r.o.
+ * Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -110,7 +110,7 @@ abstract class AbstractConstraintSystemTest() : KotlinLiteFixture() {
val system = builder.build()
- val resultingStatus = Renderers.RENDER_CONSTRAINT_SYSTEM_SHORT.render(system)
+ val resultingStatus = Renderers.renderConstraintSystem(system, shortTypeBounds = true)
val resultingSubstitutor = system.resultingSubstitutor
val result = typeParameterDescriptors.map {
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/HtmlTabledDescriptorRenderer.java b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/HtmlTabledDescriptorRenderer.java
index 1681670d7e11c..737fb172390ec 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/HtmlTabledDescriptorRenderer.java
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/HtmlTabledDescriptorRenderer.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2015 JetBrains s.r.o.
+ * Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -23,6 +23,7 @@
import org.jetbrains.annotations.Nullable;
import org.jetbrains.kotlin.descriptors.ValueParameterDescriptor;
import org.jetbrains.kotlin.diagnostics.rendering.DiagnosticParameterRenderer;
+import org.jetbrains.kotlin.diagnostics.rendering.RenderingContext;
import org.jetbrains.kotlin.diagnostics.rendering.TabledDescriptorRenderer;
import org.jetbrains.kotlin.diagnostics.rendering.TabledDescriptorRenderer.TableRenderer.DescriptorRow;
import org.jetbrains.kotlin.diagnostics.rendering.TabledDescriptorRenderer.TableRenderer.FunctionArgumentsRow;
@@ -94,9 +95,10 @@ else if (row instanceof FunctionArgumentsRow) {
@Override
protected void renderTable(TableRenderer table, StringBuilder result) {
if (table.rows.isEmpty()) return;
- int rowsNumber = countColumnNumber(table);
+ RenderingContext context = computeRenderingContext(table);
+ int rowsNumber = countColumnNumber(table);
result.append("<table>");
for (TableRow row : table.rows) {
result.append("<tr>");
@@ -111,7 +113,7 @@ protected void renderTable(TableRenderer table, StringBuilder result) {
}
if (row instanceof FunctionArgumentsRow) {
FunctionArgumentsRow functionArgumentsRow = (FunctionArgumentsRow) row;
- renderFunctionArguments(functionArgumentsRow.receiverType, functionArgumentsRow.argumentTypes, functionArgumentsRow.isErrorPosition, result);
+ renderFunctionArguments(functionArgumentsRow.receiverType, functionArgumentsRow.argumentTypes, functionArgumentsRow.isErrorPosition, result, context);
}
result.append("</tr>");
}
@@ -124,7 +126,8 @@ private void renderFunctionArguments(
@Nullable KotlinType receiverType,
@NotNull List<KotlinType> argumentTypes,
Predicate<ConstraintPosition> isErrorPosition,
- StringBuilder result
+ StringBuilder result,
+ @NotNull RenderingContext context
) {
boolean hasReceiver = receiverType != null;
tdSpace(result);
@@ -134,7 +137,7 @@ private void renderFunctionArguments(
if (isErrorPosition.apply(RECEIVER_POSITION.position())) {
error = true;
}
- receiver = "receiver: " + RenderersUtilKt.renderStrong(getTypeRenderer().render(receiverType), error);
+ receiver = "receiver: " + RenderersUtilKt.renderStrong(getTypeRenderer().render(receiverType, context), error);
}
td(result, receiver);
td(result, hasReceiver ? "arguments: " : "");
@@ -151,7 +154,7 @@ private void renderFunctionArguments(
if (isErrorPosition.apply(VALUE_PARAMETER_POSITION.position(i))) {
error = true;
}
- String renderedArgument = getTypeRenderer().render(argumentType);
+ String renderedArgument = getTypeRenderer().render(argumentType, context);
tdRight(result, RenderersUtilKt.renderStrong(renderedArgument, error) + (iterator.hasNext() ? RenderersUtilKt.renderStrong(",") : ""));
i++;
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/IdeErrorMessages.java b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/IdeErrorMessages.java
index 941e062eba6c5..dc856c556b7c7 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/IdeErrorMessages.java
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/IdeErrorMessages.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2015 JetBrains s.r.o.
+ * Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -71,11 +71,13 @@ public static boolean hasIdeSpecificMessage(@NotNull Diagnostic diagnostic) {
@NotNull
@Override
public String[] render(@NotNull TypeMismatchDueToTypeProjectionsData object) {
+ RenderingContext context = RenderingContext
+ .of(object.getExpectedType(), object.getExpressionType(), object.getReceiverType(), object.getCallableDescriptor());
return new String[] {
- HTML_RENDER_TYPE.render(object.getExpectedType()),
- HTML_RENDER_TYPE.render(object.getExpressionType()),
- HTML_RENDER_TYPE.render(object.getReceiverType()),
- HTML.render(object.getCallableDescriptor())
+ HTML_RENDER_TYPE.render(object.getExpectedType(), context),
+ HTML_RENDER_TYPE.render(object.getExpressionType(), context),
+ HTML_RENDER_TYPE.render(object.getReceiverType(), context),
+ HTML.render(object.getCallableDescriptor(), context)
};
}
});
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/IdeRenderers.kt b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/IdeRenderers.kt
index b21f0747335d3..38d14e749071c 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/IdeRenderers.kt
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/highlighter/IdeRenderers.kt
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2015 JetBrains s.r.o.
+ * Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,6 +17,7 @@
package org.jetbrains.kotlin.idea.highlighter
import org.jetbrains.kotlin.descriptors.CallableMemberDescriptor
+import org.jetbrains.kotlin.diagnostics.rendering.ContextDependentRenderer
import org.jetbrains.kotlin.diagnostics.rendering.Renderer
import org.jetbrains.kotlin.diagnostics.rendering.Renderers
import org.jetbrains.kotlin.diagnostics.rendering.asRenderer
@@ -67,28 +68,29 @@ object IdeRenderers {
Renderers.renderUpperBoundViolatedInferenceError(it, HtmlTabledDescriptorRenderer.create()).toString()
}
- @JvmField val HTML_RENDER_RETURN_TYPE = Renderer<CallableMemberDescriptor> {
- val returnType = it.returnType!!
- DescriptorRenderer.HTML.renderType(returnType)
+ @JvmField val HTML_RENDER_RETURN_TYPE = ContextDependentRenderer<CallableMemberDescriptor> {
+ member, context ->
+ HTML_RENDER_TYPE.render(member.returnType!!, context)
}
@JvmField val HTML_COMPACT_WITH_MODIFIERS = DescriptorRenderer.HTML.withOptions {
withDefinedIn = false
}.asRenderer()
- @JvmField val HTML_CONFLICTING_JVM_DECLARATIONS_DATA = Renderer {
- data: ConflictingJvmDeclarationsData ->
+ @JvmField val HTML_CONFLICTING_JVM_DECLARATIONS_DATA = ContextDependentRenderer {
+ data: ConflictingJvmDeclarationsData, renderingContext ->
val conflicts = data.signatureOrigins
- .mapNotNull { it.descriptor }
- .sortedWith(MemberComparator.INSTANCE)
- .joinToString("") { "<li>" + HTML_COMPACT_WITH_MODIFIERS.render(it) + "</li>\n" }
+ .mapNotNull { it.descriptor }
+ .sortedWith(MemberComparator.INSTANCE)
+ .joinToString("") { "<li>" + HTML_COMPACT_WITH_MODIFIERS.render(it, renderingContext) + "</li>\n" }
"The following declarations have the same JVM signature (<code>${data.signature.name}${data.signature.desc}</code>):<br/>\n<ul>\n$conflicts</ul>"
}
- @JvmField val HTML_THROWABLE = Renderer<Throwable> {
- Renderers.THROWABLE.render(it).replace("\n", "<br/>")
+ @JvmField val HTML_THROWABLE = ContextDependentRenderer<Throwable> {
+ throwable, context ->
+ Renderers.THROWABLE.render(throwable, context).replace("\n", "<br/>")
}
@JvmField val HTML = DescriptorRenderer.HTML.asRenderer()
diff --git a/js/js.frontend/src/org/jetbrains/kotlin/js/resolve/diagnostics/jsRenderers.kt b/js/js.frontend/src/org/jetbrains/kotlin/js/resolve/diagnostics/jsRenderers.kt
index 66a5984a21b68..edf40981acc28 100644
--- a/js/js.frontend/src/org/jetbrains/kotlin/js/resolve/diagnostics/jsRenderers.kt
+++ b/js/js.frontend/src/org/jetbrains/kotlin/js/resolve/diagnostics/jsRenderers.kt
@@ -1,5 +1,5 @@
/*
- * Copyright 2010-2015 JetBrains s.r.o.
+ * Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,9 +19,10 @@ package org.jetbrains.kotlin.js.resolve.diagnostics
import com.google.gwt.dev.js.rhino.Utils.isEndOfLine
import com.intellij.psi.PsiElement
import org.jetbrains.kotlin.diagnostics.rendering.DiagnosticParameterRenderer
+import org.jetbrains.kotlin.diagnostics.rendering.RenderingContext
object RenderFirstLineOfElementText : DiagnosticParameterRenderer<PsiElement> {
- override fun render(element: PsiElement): String {
+ override fun render(element: PsiElement, context: RenderingContext): String {
val text = element.text
val index = text.indexOf('\n')
return if (index == -1) text else text.substring(0, index) + "..."
@@ -31,7 +32,7 @@ object RenderFirstLineOfElementText : DiagnosticParameterRenderer<PsiElement> {
abstract class JsCallDataRenderer : DiagnosticParameterRenderer<JsCallData> {
protected abstract fun format(data: JsCallDataWithCode): String
- override fun render(data: JsCallData): String =
+ override fun render(data: JsCallData, context: RenderingContext): String =
when (data) {
is JsCallDataWithCode -> format(data)
is JsCallData -> data.message
|
7b7ab09e22500c6140fafbf59e46c5d3843571c7
|
camel
|
CAMEL-1842 Added OSGi integration test for camel-mail and camel-cxf
git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@795813 13f79535-47bb-0310-9956-ffa450edef68
|
p
|
https://github.com/apache/camel
|
diff --git a/examples/camel-example-reportincident/pom.xml b/examples/camel-example-reportincident/pom.xml
index ec1468cc86253..a314c4cc51c77 100755
--- a/examples/camel-example-reportincident/pom.xml
+++ b/examples/camel-example-reportincident/pom.xml
@@ -31,89 +31,157 @@
and send as emails to a backing system
</description>
<packaging>war</packaging>
+
+ <repositories>
+ <repository>
+ <id>ops4j.releases</id>
+ <url>http://repository.ops4j.org/maven2</url>
+ <snapshots>
+ <enabled>false</enabled>
+ </snapshots>
+ </repository>
+ <repository>
+ <id>ops4j.snapshots</id>
+ <url>http://repository.ops4j.org/mvn-snapshots</url>
+ <snapshots>
+ <enabled>true</enabled>
+ </snapshots>
+ </repository>
+ <repository>
+ <id>aqute.biz</id>
+ <url>http://www.aqute.biz/repo</url>
+ </repository>
+ </repositories>
- <dependencies>
- <dependency>
- <groupId>org.apache.camel</groupId>
- <artifactId>camel-core</artifactId>
- </dependency>
+ <pluginRepositories>
+ <pluginRepository>
+ <id>ops4j.releases</id>
+ <url>http://repository.ops4j.org/maven2</url>
+ <snapshots>
+ <enabled>false</enabled>
+ </snapshots>
+ </pluginRepository>
+ </pluginRepositories>
- <dependency>
- <groupId>org.apache.camel</groupId>
- <artifactId>camel-spring</artifactId>
- </dependency>
+ <properties>
+ <pax-exam-version>0.6.0</pax-exam-version>
+ <pax-tiny-bundle-version>1.0.0</pax-tiny-bundle-version>
+ </properties>
- <dependency>
- <groupId>org.apache.camel</groupId>
- <artifactId>camel-cxf</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.camel</groupId>
- <artifactId>camel-velocity</artifactId>
- </dependency>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-core</artifactId>
+ </dependency>
- <dependency>
- <groupId>org.apache.camel</groupId>
- <artifactId>camel-mail</artifactId>
- </dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-spring</artifactId>
+ </dependency>
- <!-- cxf -->
- <dependency>
- <groupId>org.apache.cxf</groupId>
- <artifactId>cxf-rt-core</artifactId>
- <version>${cxf-version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.cxf</groupId>
- <artifactId>cxf-rt-frontend-jaxws</artifactId>
- <version>${cxf-version}</version>
- </dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-cxf</artifactId>
+ </dependency>
- <!-- regular http transport -->
- <dependency>
- <groupId>org.apache.cxf</groupId>
- <artifactId>cxf-rt-transports-http</artifactId>
- <version>${cxf-version}</version>
- </dependency>
-
- <!-- logging -->
- <dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- <version>1.2.14</version>
- </dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-velocity</artifactId>
+ </dependency>
- <!-- cxf web container for unit testing -->
- <dependency>
- <groupId>org.apache.cxf</groupId>
- <artifactId>cxf-rt-transports-http-jetty</artifactId>
- <version>${cxf-version}</version>
- </dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-mail</artifactId>
+ </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.2</version>
- <scope>test</scope>
- </dependency>
+ <!-- cxf -->
+ <dependency>
+ <groupId>org.apache.cxf</groupId>
+ <artifactId>cxf-rt-core</artifactId>
+ <version>${cxf-version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.cxf</groupId>
+ <artifactId>cxf-rt-frontend-jaxws</artifactId>
+ <version>${cxf-version}</version>
+ </dependency>
- <!-- unit testing mail using mock -->
- <dependency>
- <groupId>org.jvnet.mock-javamail</groupId>
- <artifactId>mock-javamail</artifactId>
- <version>1.7</version>
- <scope>test</scope>
- </dependency>
+ <!-- regular http transport -->
+ <dependency>
+ <groupId>org.apache.cxf</groupId>
+ <artifactId>cxf-rt-transports-http</artifactId>
+ <version>${cxf-version}</version>
+ </dependency>
- <dependency>
- <groupId>org.apache.camel</groupId>
- <artifactId>camel-core</artifactId>
- <scope>test</scope>
- <type>test-jar</type>
- </dependency>
+ <!-- logging -->
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <version>1.2.14</version>
+ </dependency>
+
+ <!-- cxf web container for unit testing -->
+ <dependency>
+ <groupId>org.apache.cxf</groupId>
+ <artifactId>cxf-rt-transports-http-jetty</artifactId>
+ <version>${cxf-version}</version>
+ </dependency>
- </dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.ops4j.pax.exam</groupId>
+ <artifactId>pax-exam</artifactId>
+ <version>${pax-exam-version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.ops4j.pax.exam</groupId>
+ <artifactId>pax-exam-junit</artifactId>
+ <version>${pax-exam-version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.ops4j.pax.exam</groupId>
+ <artifactId>pax-exam-container-default</artifactId>
+ <version>${pax-exam-version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.ops4j.pax.exam</groupId>
+ <artifactId>pax-exam-junit-extender-impl</artifactId>
+ <version>${pax-exam-version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.ops4j.pax.swissbox</groupId>
+ <artifactId>pax-swissbox-tinybundles</artifactId>
+ <version>1.0.0</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.camel.karaf</groupId>
+ <artifactId>features</artifactId>
+ <version>${version}</version>
+ <type>pom</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-osgi</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.camel.tests</groupId>
+ <artifactId>org.apache.camel.tests.mock-javamail_1.7</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
<build>
<plugins>
@@ -127,7 +195,20 @@
<target>1.5</target>
</configuration>
</plugin>
-
+
+ <plugin>
+ <groupId>org.apache.servicemix.tooling</groupId>
+ <artifactId>depends-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>generate-depends-file</id>
+ <goals>
+ <goal>generate-depends-file</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+
<!-- CXF wsdl2java generator, will plugin to the compile goal -->
<plugin>
<groupId>org.apache.cxf</groupId>
@@ -141,7 +222,7 @@
<sourceRoot>${basedir}/target/generated/src/main/java</sourceRoot>
<wsdlOptions>
<wsdlOption>
- <wsdl>${basedir}/src/main/resources/report_incident.wsdl</wsdl>
+ <wsdl>${basedir}/src/main/resources/etc/report_incident.wsdl</wsdl>
</wsdlOption>
</wsdlOptions>
</configuration>
@@ -167,6 +248,16 @@
</configuration>
</plugin>
+ <plugin>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <excludes>
+ <!-- TODO: temporary disable unit test to let TC not hang -->
+ <exclude>**/*OSGiTest.*</exclude>
+ </excludes>
+ </configuration>
+ </plugin>
+
</plugins>
</build>
diff --git a/examples/camel-example-reportincident/src/main/java/org/apache/camel/example/reportincident/ReportIncidentRoutes.java b/examples/camel-example-reportincident/src/main/java/org/apache/camel/example/reportincident/ReportIncidentRoutes.java
index 49a4d40e32c81..a8d49efd54319 100755
--- a/examples/camel-example-reportincident/src/main/java/org/apache/camel/example/reportincident/ReportIncidentRoutes.java
+++ b/examples/camel-example-reportincident/src/main/java/org/apache/camel/example/reportincident/ReportIncidentRoutes.java
@@ -48,7 +48,7 @@ public void configure() throws Exception {
}
String cxfEndpoint = cxfEndpointAddress
+ "?serviceClass=org.apache.camel.example.reportincident.ReportIncidentEndpoint"
- + "&wsdlURL=report_incident.wsdl";
+ + "&wsdlURL=etc/report_incident.wsdl";
// first part from the webservice -> file backup
from(cxfEndpoint)
@@ -57,7 +57,7 @@ public void configure() throws Exception {
// then set the file name using the FilenameGenerator bean
.setHeader(Exchange.FILE_NAME, BeanLanguage.bean(FilenameGenerator.class, "generateFilename"))
// and create the mail body using velocity template
- .to("velocity:MailBody.vm")
+ .to("velocity:etc/MailBody.vm")
// and store the file
.to("file://target/subfolder")
// return OK as response
diff --git a/examples/camel-example-reportincident/src/main/resources/META-INF/spring/camel-context.xml b/examples/camel-example-reportincident/src/main/resources/META-INF/spring/camel-context.xml
index 288646a52274b..b19fd2f8af620 100644
--- a/examples/camel-example-reportincident/src/main/resources/META-INF/spring/camel-context.xml
+++ b/examples/camel-example-reportincident/src/main/resources/META-INF/spring/camel-context.xml
@@ -31,7 +31,7 @@
<cxf:cxfEndpoint id="reportIncident"
address="http://localhost:9080/camel-example-reportincident/webservices/incident"
- wsdlURL="report_incident.wsdl"
+ wsdlURL="etc/report_incident.wsdl"
serviceClass="org.apache.camel.example.reportincident.ReportIncidentEndpoint">
</cxf:cxfEndpoint>
@@ -46,7 +46,7 @@
<camel:setHeader headerName="CamelFileName">
<camel:method bean="filenameGenerator" method="generateFilename" />
</camel:setHeader>
- <camel:to uri="velocity:MailBody.vm"/>
+ <camel:to uri="velocity:etc/MailBody.vm"/>
<camel:to uri="file://target/subfolder"/>
<camel:transform>
<camel:method bean="myBean" method="getOK" />
diff --git a/examples/camel-example-reportincident/src/main/resources/MailBody.vm b/examples/camel-example-reportincident/src/main/resources/etc/MailBody.vm
similarity index 100%
rename from examples/camel-example-reportincident/src/main/resources/MailBody.vm
rename to examples/camel-example-reportincident/src/main/resources/etc/MailBody.vm
diff --git a/examples/camel-example-reportincident/src/main/resources/report_incident.wsdl b/examples/camel-example-reportincident/src/main/resources/etc/report_incident.wsdl
similarity index 100%
rename from examples/camel-example-reportincident/src/main/resources/report_incident.wsdl
rename to examples/camel-example-reportincident/src/main/resources/etc/report_incident.wsdl
diff --git a/examples/camel-example-reportincident/src/test/java/org/apache/camel/example/reportincident/ReportIncidentRoutesOSGiTest.java b/examples/camel-example-reportincident/src/test/java/org/apache/camel/example/reportincident/ReportIncidentRoutesOSGiTest.java
new file mode 100644
index 0000000000000..938ce29fce661
--- /dev/null
+++ b/examples/camel-example-reportincident/src/test/java/org/apache/camel/example/reportincident/ReportIncidentRoutesOSGiTest.java
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.example.reportincident;
+
+
+import org.apache.camel.CamelContext;
+
+import org.apache.camel.impl.DefaultCamelContext;
+import org.apache.camel.osgi.CamelContextFactory;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.cxf.jaxws.JaxWsProxyFactoryBean;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.jvnet.mock_javamail.Mailbox;
+import org.ops4j.pax.exam.Inject;
+import org.ops4j.pax.exam.Option;
+import org.ops4j.pax.exam.junit.Configuration;
+import org.ops4j.pax.exam.junit.JUnit4TestRunner;
+import org.ops4j.pax.swissbox.tinybundles.core.TinyBundle;
+import org.ops4j.pax.swissbox.tinybundles.dp.Constants;
+import org.osgi.framework.BundleContext;
+
+import static org.ops4j.pax.exam.CoreOptions.bundle;
+import static org.ops4j.pax.exam.CoreOptions.felix;
+import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
+import static org.ops4j.pax.exam.CoreOptions.options;
+import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.logProfile;
+import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.profile;
+import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures;
+import static org.ops4j.pax.swissbox.tinybundles.core.TinyBundles.asURL;
+import static org.ops4j.pax.swissbox.tinybundles.core.TinyBundles.newBundle;
+import static org.ops4j.pax.swissbox.tinybundles.core.TinyBundles.withBnd;
+/**
+ * Unit test of our routes
+ */
+@RunWith(JUnit4TestRunner.class)
+public class ReportIncidentRoutesOSGiTest extends ReportIncidentRoutesTest {
+ private static final transient Log LOG = LogFactory.getLog(ReportIncidentRoutesOSGiTest.class);
+
+ @Inject
+ protected BundleContext bundleContext;
+
+ protected void startOSGiCamel() throws Exception {
+ CamelContextFactory factory = new CamelContextFactory();
+ factory.setBundleContext(bundleContext);
+ LOG.info("Get the bundleContext is " + bundleContext);
+ camel = factory.createContext();
+ ReportIncidentRoutes routes = new ReportIncidentRoutes();
+ routes.setUsingServletTransport(false);
+ camel.addRoutes(routes);
+ camel.start();
+ }
+
+
+ @Test
+ public void testRendportIncident() throws Exception {
+ startOSGiCamel();
+ runTest();
+ stopCamel();
+ }
+
+ @Configuration
+ public static Option[] configure() {
+ Option[] options = options(
+ // install log service using pax runners profile abstraction (there are more profiles, like DS)
+ logProfile().version("1.3.0"),
+ // install the spring dm profile
+ profile("spring.dm").version("1.2.0"),
+ // this is how you set the default log level when using pax logging (logProfile)
+ org.ops4j.pax.exam.CoreOptions.systemProperty("org.ops4j.pax.logging.DefaultServiceLog.level").value("INFO"),
+ org.ops4j.pax.exam.CoreOptions.systemProperty("org.apache.cxf.nofastinfoset").value("false"),
+ org.ops4j.pax.exam.CoreOptions.systemProperty("xml.catalog.staticCatalog").value("false"),
+ // using the features to install the camel components
+ scanFeatures(mavenBundle().groupId("org.apache.camel.karaf").
+ artifactId("features").versionAsInProject().type("xml/features"),
+ "camel-core", "camel-osgi", "camel-spring", "camel-test", "camel-velocity", "camel-cxf", "camel-mail"),
+
+ // Added the mock_java_mail bundle for testing
+ mavenBundle().groupId("org.apache.camel.tests").artifactId("org.apache.camel.tests.mock-javamail_1.7").versionAsInProject(),
+
+ // create a customer bundle start up the report incident bundle
+ bundle(newBundle().addClass(InputReportIncident.class)
+ .addClass(ObjectFactory.class)
+ .addClass(OutputReportIncident.class)
+ .addClass(ReportIncidentRoutesOSGiTest.class)
+ .addClass(ReportIncidentRoutesTest.class)
+ .addClass(ReportIncidentRoutes.class)
+ .addClass(MyBean.class)
+ .addClass(FilenameGenerator.class)
+ .addClass(ReportIncidentEndpoint.class)
+ .addClass(ReportIncidentEndpointService.class)
+ .addResource("etc/report_incident.wsdl", ReportIncidentRoutesTest.class.getResource("/etc/report_incident.wsdl"))
+ .addResource("etc/MailBody.vm", ReportIncidentRoutesTest.class.getResource("/etc/MailBody.vm"))
+ .prepare(withBnd().set(Constants.BUNDLE_SYMBOLICNAME, "CamelExampleReportIncidentBundle")
+ .set(Constants.EXPORT_PACKAGE, "org.apache.camel.example.reportincident,etc")).build(asURL()).toString()),
+
+
+ felix());
+
+ return options;
+ }
+
+}
diff --git a/examples/camel-example-reportincident/src/test/java/org/apache/camel/example/reportincident/ReportIncidentRoutesTest.java b/examples/camel-example-reportincident/src/test/java/org/apache/camel/example/reportincident/ReportIncidentRoutesTest.java
index 6a602a2186c43..8ecb760c0c3ff 100755
--- a/examples/camel-example-reportincident/src/test/java/org/apache/camel/example/reportincident/ReportIncidentRoutesTest.java
+++ b/examples/camel-example-reportincident/src/test/java/org/apache/camel/example/reportincident/ReportIncidentRoutesTest.java
@@ -20,12 +20,15 @@
import org.apache.camel.CamelContext;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.cxf.jaxws.JaxWsProxyFactoryBean;
+import org.junit.Test;
import org.jvnet.mock_javamail.Mailbox;
+import static org.junit.Assert.assertEquals;
+
/**
* Unit test of our routes
*/
-public class ReportIncidentRoutesTest extends TestCase {
+public class ReportIncidentRoutesTest {
// should be the same address as we have in our route
private static final String URL = "http://localhost:9080/camel-example-reportincident/webservices/incident";
@@ -52,10 +55,18 @@ protected static ReportIncidentEndpoint createCXFClient() {
return (ReportIncidentEndpoint) factory.create();
}
+ @Test
public void testRendportIncident() throws Exception {
// start camel
startCamel();
+ runTest();
+
+ // stop camel
+ stopCamel();
+ }
+
+ protected void runTest() throws Exception {
// assert mailbox is empty before starting
Mailbox inbox = Mailbox.get("[email protected]");
inbox.clear();
@@ -84,8 +95,5 @@ public void testRendportIncident() throws Exception {
// assert mail box
assertEquals("Should have got 1 mail", 1, inbox.size());
-
- // stop camel
- stopCamel();
}
}
diff --git a/parent/pom.xml b/parent/pom.xml
index d004878066b4d..b1808760a2837 100644
--- a/parent/pom.xml
+++ b/parent/pom.xml
@@ -443,6 +443,12 @@
<artifactId>camel-manual</artifactId>
<version>${project.version}</version>
</dependency>
+
+ <dependency>
+ <groupId>org.apache.camel.tests</groupId>
+ <artifactId>org.apache.camel.tests.mock-javamail_1.7</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<!-- testing jars -->
<dependency>
diff --git a/pom.xml b/pom.xml
index e9b9433155aff..17c92e2ca0310 100755
--- a/pom.xml
+++ b/pom.xml
@@ -97,10 +97,10 @@
<module>parent</module>
<module>camel-core</module>
<module>components</module>
- <module>examples</module>
<module>platforms</module>
<module>tooling</module>
<module>tests</module>
+ <module>examples</module>
<module>apache-camel</module>
</modules>
diff --git a/tests/camel-itest-osgi/pom.xml b/tests/camel-itest-osgi/pom.xml
index a7a4329fdd905..36021410a093c 100644
--- a/tests/camel-itest-osgi/pom.xml
+++ b/tests/camel-itest-osgi/pom.xml
@@ -127,6 +127,21 @@
<groupId>org.apache.camel</groupId>
<artifactId>camel-servlet</artifactId>
<scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.camel.tests</groupId>
+ <artifactId>org.apache.camel.tests.mock-javamail_1.7</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-context-support</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-test</artifactId>
+ <scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>
diff --git a/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/MailRouteTest.java b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/MailRouteTest.java
new file mode 100644
index 0000000000000..11260009a1006
--- /dev/null
+++ b/tests/camel-itest-osgi/src/test/java/org/apache/camel/itest/osgi/MailRouteTest.java
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.itest.osgi;
+
+import java.io.InputStream;
+import java.util.HashMap;
+
+import javax.mail.Address;
+import javax.mail.Message;
+import javax.mail.Message.RecipientType;
+
+
+import org.apache.camel.Exchange;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.component.mock.MockEndpoint;
+import org.apache.camel.converter.IOConverter;
+import org.apache.camel.test.junit4.CamelTestSupport;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.jvnet.mock_javamail.Mailbox;
+import org.ops4j.pax.exam.Option;
+import org.ops4j.pax.exam.junit.Configuration;
+import org.ops4j.pax.exam.junit.JUnit4TestRunner;
+
+import static org.ops4j.pax.exam.CoreOptions.felix;
+import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
+import static org.ops4j.pax.exam.CoreOptions.options;
+import static org.ops4j.pax.exam.CoreOptions.provision;
+import static org.ops4j.pax.exam.CoreOptions.wrappedBundle;
+import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.logProfile;
+import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.profile;
+import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.scanFeatures;
+
+@RunWith(JUnit4TestRunner.class)
+public class MailRouteTest extends OSGiIntegrationTestSupport {
+
+ @Test
+ public void testSendAndReceiveMails() throws Exception {
+ Mailbox.clearAll();
+
+ MockEndpoint resultEndpoint = getMockEndpoint("mock:result");
+ resultEndpoint.expectedBodiesReceived("hello world!");
+
+ HashMap<String, Object> headers = new HashMap<String, Object>();
+ headers.put("reply-to", "route-test-reply@localhost");
+ template.sendBodyAndHeaders("smtp://route-test-james@localhost", "hello world!", headers);
+
+ // lets test the first sent worked
+ assertMailboxReceivedMessages("route-test-james@localhost");
+
+ // lets sleep to check that the mail poll does not redeliver duplicate mails
+ Thread.sleep(3000);
+
+ // lets test the receive worked
+ resultEndpoint.assertIsSatisfied();
+
+ // Validate that the headers were preserved.
+ Exchange exchange = resultEndpoint.getReceivedExchanges().get(0);
+ String replyTo = (String)exchange.getIn().getHeader("reply-to");
+ assertEquals("route-test-reply@localhost", replyTo);
+
+ assertMailboxReceivedMessages("route-test-copy@localhost");
+ }
+
+ protected void assertMailboxReceivedMessages(String name) throws Exception {
+ Mailbox mailbox = Mailbox.get(name);
+ assertEquals(name + " should have received 1 mail", 1, mailbox.size());
+
+ Message message = mailbox.get(0);
+ assertNotNull(name + " should have received at least one mail!", message);
+ Object content = message.getContent();
+ assertNotNull("The content should not be null!", content);
+ if (content instanceof InputStream) {
+ assertEquals("hello world!", IOConverter.toString((InputStream)content));
+ } else {
+ assertEquals("hello world!", message.getContent());
+ }
+ assertEquals("camel@localhost", message.getFrom()[0].toString());
+ boolean found = false;
+ for (Address adr : message.getRecipients(RecipientType.TO)) {
+ if (name.equals(adr.toString())) {
+ found = true;
+ }
+ }
+ assertTrue("Should have found the recpient to in the mail: " + name, found);
+ }
+
+ @Override
+ protected RouteBuilder createRouteBuilder() {
+ return new RouteBuilder() {
+ public void configure() {
+ from("pop3://route-test-james@localhost?consumer.delay=1000")
+ .to("direct:a");
+
+ // must use fixed to option to send the mail to the given reciever, as we have polled
+ // a mail from a mailbox where it already has the 'old' To as header value
+ // here we send the mail to 2 recievers. notice we can use a plain string with semi colon
+ // to seperate the mail addresses
+ from("direct:a")
+ .setHeader("to", constant("route-test-result@localhost; route-test-copy@localhost"))
+ .to("smtp://localhost");
+
+ from("pop3://route-test-result@localhost?consumer.delay=1000")
+ .convertBodyTo(String.class).to("mock:result");
+ }
+ };
+ }
+
+ @Configuration
+ public static Option[] configure() {
+ Option[] options = options(
+ // install log service using pax runners profile abstraction (there are more profiles, like DS)
+ logProfile().version("1.3.0"),
+ // install the spring dm profile
+ profile("spring.dm").version("1.2.0"),
+ // this is how you set the default log level when using pax logging (logProfile)
+ org.ops4j.pax.exam.CoreOptions.systemProperty("org.ops4j.pax.logging.DefaultServiceLog.level").value("INFO"),
+
+ // using the features to install the camel components
+ scanFeatures(mavenBundle().groupId("org.apache.camel.karaf").
+ artifactId("features").versionAsInProject().type("xml/features"),
+ "camel-core", "camel-osgi", "camel-spring", "camel-test", "camel-mail"),
+
+ // Added the mock_java_mail bundle for testing
+ mavenBundle().groupId("org.apache.camel.tests").artifactId("org.apache.camel.tests.mock-javamail_1.7").versionAsInProject(),
+
+ felix());
+
+ return options;
+ }
+
+}
diff --git a/tests/camel-itest-osgi/src/test/resources/log4j.properties b/tests/camel-itest-osgi/src/test/resources/log4j.properties
index 58f8a8380214e..48315a605ba15 100644
--- a/tests/camel-itest-osgi/src/test/resources/log4j.properties
+++ b/tests/camel-itest-osgi/src/test/resources/log4j.properties
@@ -18,7 +18,7 @@
#
# The logging properties used during tests..
#
-log4j.rootLogger=INFO, stdout
+log4j.rootLogger=INFO, out
# Use the following line to turn on debug output for camel
#log4j.logger.org.apache.camel=DEBUG
|
50e3ca62e5b5cceb13ead212f50aaae57e8990f5
|
orientdb
|
Working to fix corrupted data in sockets
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java
index 7b3c2430ba3..4d2373a9126 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java
@@ -154,6 +154,12 @@ protected void sendOk(final int iClientTxId) throws IOException {
}
protected void sendError(final int iClientTxId, final Throwable t) throws IOException {
+ if (t instanceof SocketException) {
+ // DON'T SEND TO THE CLIENT BECAUSE THE SOCKET HAS PROBLEMS
+ shutdown();
+ return;
+ }
+
channel.acquireExclusiveLock();
try {
|
6cf1c05df962548294a2ec5059e6176ca24b8dfc
|
intellij-community
|
formatting of injected code refactored - 3
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/platform/lang-impl/src/com/intellij/psi/formatter/common/InjectedLanguageBlockBuilder.java b/platform/lang-impl/src/com/intellij/psi/formatter/common/InjectedLanguageBlockBuilder.java
index 443caba5c1eb5..fba17948117c0 100644
--- a/platform/lang-impl/src/com/intellij/psi/formatter/common/InjectedLanguageBlockBuilder.java
+++ b/platform/lang-impl/src/com/intellij/psi/formatter/common/InjectedLanguageBlockBuilder.java
@@ -38,14 +38,16 @@ public abstract class InjectedLanguageBlockBuilder {
private static final Logger LOG = Logger.getInstance("#com.intellij.psi.formatter.xml.XmlInjectedLanguageBlockBuilder");
public Block createInjectedBlock(ASTNode node, Block originalBlock, Indent indent, int offset, TextRange range) {
- return new InjectedLanguageBlockWrapper(originalBlock, offset, range);
+ return new InjectedLanguageBlockWrapper(originalBlock, offset, range, indent);
}
public abstract CodeStyleSettings getSettings();
public abstract boolean canProcessFragment(String text, ASTNode injectionHost);
- public abstract Block createBlockNextToInjection(ASTNode node, Wrap wrap, Alignment alignment, Indent indent, TextRange range);
+ public abstract Block createBlockBeforeInjection(ASTNode node, Wrap wrap, Alignment alignment, Indent indent, TextRange range);
+
+ public abstract Block createBlockAfterInjection(ASTNode node, Wrap wrap, Alignment alignment, Indent indent, TextRange range);
public boolean addInjectedBlocks(List<Block> result, final ASTNode injectionHost, Wrap wrap, Alignment alignment, Indent indent) {
final PsiFile[] injectedFile = new PsiFile[1];
@@ -85,7 +87,7 @@ public void visit(@NotNull final PsiFile injectedPsi, @NotNull final List<PsiLan
int childOffset = range.getStartOffset();
if (startOffset != 0) {
final ASTNode leaf = injectionHost.findLeafElementAt(startOffset - 1);
- result.add(createBlockNextToInjection(leaf, wrap, alignment, indent, new TextRange(childOffset, childOffset + startOffset)));
+ result.add(createBlockBeforeInjection(leaf, wrap, alignment, indent, new TextRange(childOffset, childOffset + startOffset)));
}
addInjectedLanguageBlockWrapper(result, injectedFile[0].getNode(), indent, childOffset + startOffset,
@@ -93,7 +95,7 @@ public void visit(@NotNull final PsiFile injectedPsi, @NotNull final List<PsiLan
if (endOffset != injectionHost.getTextLength()) {
final ASTNode leaf = injectionHost.findLeafElementAt(endOffset);
- result.add(createBlockNextToInjection(leaf, wrap, alignment, indent, new TextRange(childOffset + endOffset, range.getEndOffset())));
+ result.add(createBlockAfterInjection(leaf, wrap, alignment, indent, new TextRange(childOffset + endOffset, range.getEndOffset())));
}
return true;
}
diff --git a/platform/lang-impl/src/com/intellij/psi/formatter/common/InjectedLanguageBlockWrapper.java b/platform/lang-impl/src/com/intellij/psi/formatter/common/InjectedLanguageBlockWrapper.java
index 039e4909bd284..566400540370a 100644
--- a/platform/lang-impl/src/com/intellij/psi/formatter/common/InjectedLanguageBlockWrapper.java
+++ b/platform/lang-impl/src/com/intellij/psi/formatter/common/InjectedLanguageBlockWrapper.java
@@ -27,6 +27,7 @@ public final class InjectedLanguageBlockWrapper implements Block {
private final Block myOriginal;
private final int myOffset;
private final TextRange myRange;
+ @Nullable private final Indent myIndent;
private List<Block> myBlocks;
/**
@@ -40,15 +41,17 @@ public final class InjectedLanguageBlockWrapper implements Block {
* @param original block inside injected code
* @param offset start offset of injected code inside the main document
* @param range range of code inside injected document which is really placed in the main document
+ * @param indent
*/
- public InjectedLanguageBlockWrapper(final @NotNull Block original, final int offset, @Nullable TextRange range) {
+ public InjectedLanguageBlockWrapper(final @NotNull Block original, final int offset, @Nullable TextRange range, @Nullable Indent indent) {
myOriginal = original;
myOffset = offset;
myRange = range;
+ myIndent = indent;
}
public Indent getIndent() {
- return myOriginal.getIndent();
+ return myIndent != null ? myIndent : myOriginal.getIndent();
}
@Nullable
@@ -58,7 +61,10 @@ public Alignment getAlignment() {
@NotNull
public TextRange getTextRange() {
- final TextRange range = myOriginal.getTextRange();
+ TextRange range = myOriginal.getTextRange();
+ if (myRange != null) {
+ range = range.intersection(myRange);
+ }
int start = myOffset + range.getStartOffset() - (myRange != null ? myRange.getStartOffset() : 0);
return TextRange.from(start, range.getLength());
@@ -80,7 +86,7 @@ private List<Block> buildBlocks() {
final ArrayList<Block> result = new ArrayList<Block>(list.size());
if (myRange == null) {
for (Block block : list) {
- result.add(new InjectedLanguageBlockWrapper(block, myOffset, myRange));
+ result.add(new InjectedLanguageBlockWrapper(block, myOffset, myRange, null));
}
}
else {
@@ -93,7 +99,7 @@ private void collectBlocksIntersectingRange(final List<Block> list, final List<B
for (Block block : list) {
final TextRange textRange = block.getTextRange();
if (range.contains(textRange)) {
- result.add(new InjectedLanguageBlockWrapper(block, myOffset, range));
+ result.add(new InjectedLanguageBlockWrapper(block, myOffset, range, null));
}
else if (textRange.intersectsStrict(range)) {
collectBlocksIntersectingRange(block.getSubBlocks(), result, range);
diff --git a/xml/impl/src/com/intellij/psi/formatter/xml/AnotherLanguageBlockWrapper.java b/xml/impl/src/com/intellij/psi/formatter/xml/AnotherLanguageBlockWrapper.java
index 155b675fe4153..005519dab9561 100644
--- a/xml/impl/src/com/intellij/psi/formatter/xml/AnotherLanguageBlockWrapper.java
+++ b/xml/impl/src/com/intellij/psi/formatter/xml/AnotherLanguageBlockWrapper.java
@@ -37,7 +37,7 @@ public AnotherLanguageBlockWrapper(final ASTNode node,
final int offset,
@Nullable TextRange range) {
super(node, original.getWrap(), original.getAlignment(), policy);
- myInjectedBlock = new InjectedLanguageBlockWrapper(original, offset, range);
+ myInjectedBlock = new InjectedLanguageBlockWrapper(original, offset, range, null);
myIndent = indent;
}
diff --git a/xml/impl/src/com/intellij/psi/formatter/xml/XmlInjectedLanguageBlockBuilder.java b/xml/impl/src/com/intellij/psi/formatter/xml/XmlInjectedLanguageBlockBuilder.java
index da40e1071fcf6..ad97addfdf1ca 100644
--- a/xml/impl/src/com/intellij/psi/formatter/xml/XmlInjectedLanguageBlockBuilder.java
+++ b/xml/impl/src/com/intellij/psi/formatter/xml/XmlInjectedLanguageBlockBuilder.java
@@ -42,7 +42,12 @@ public Block createInjectedBlock(ASTNode node, Block originalBlock, Indent inden
}
@Override
- public Block createBlockNextToInjection(ASTNode node, Wrap wrap, Alignment alignment, Indent indent, TextRange range) {
+ public Block createBlockBeforeInjection(ASTNode node, Wrap wrap, Alignment alignment, Indent indent, TextRange range) {
+ return new XmlBlock(node, wrap, alignment, myXmlFormattingPolicy, indent, range);
+ }
+
+ @Override
+ public Block createBlockAfterInjection(ASTNode node, Wrap wrap, Alignment alignment, Indent indent, TextRange range) {
return new XmlBlock(node, wrap, alignment, myXmlFormattingPolicy, indent, range);
}
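
The core of the wrapper change is how an injected block's text range is mapped back into the host document: clip the block's range to the fragment of injected code that actually appears in the host file, then shift it by the fragment's offset in the host document. Below is a minimal sketch of that mapping in plain Java; the Range type is a stand-in, not IntelliJ's TextRange API.

// Sketch of InjectedLanguageBlockWrapper.getTextRange()-style offset mapping.
public class InjectedRangeSketch {

    record Range(int start, int end) {
        int length() { return end - start; }
        Range intersection(Range other) {
            int s = Math.max(start, other.start), e = Math.min(end, other.end);
            return new Range(s, Math.max(s, e));
        }
    }

    static Range mapToHost(Range blockRange, Range fragmentRange, int hostOffset) {
        Range clipped = blockRange.intersection(fragmentRange); // keep only what is in the host
        int start = hostOffset + clipped.start() - fragmentRange.start();
        return new Range(start, start + clipped.length());
    }

    public static void main(String[] args) {
        // a block covering [5, 20) of the injected file, of which only [8, 30)
        // is really present in the host document, starting at host offset 100
        Range mapped = mapToHost(new Range(5, 20), new Range(8, 30), 100);
        System.out.println(mapped); // Range[start=100, end=112]
    }
}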
|
c3a1bbd206678a74f06a2ee2320b3547e26c3d43
|
drools
|
JBRULES-1520: moving test to correct test file (git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@19375 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70)
|
p
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/FirstOrderLogicTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/FirstOrderLogicTest.java
index 6faeee4fdc9..0f4aa59cbbc 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/FirstOrderLogicTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/FirstOrderLogicTest.java
@@ -82,6 +82,34 @@ public void testCollect() throws Exception {
results.get( 0 ).getClass().getName() );
}
+ public void testCollectNodeSharing() throws Exception {
+ final PackageBuilder builder = new PackageBuilder();
+ builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_collectNodeSharing.drl" ) ) );
+ final Package pkg = builder.getPackage();
+
+ RuleBase ruleBase = getRuleBase();
+ ruleBase.addPackage( pkg );
+ ruleBase = SerializationHelper.serializeObject(ruleBase);
+ final WorkingMemory workingMemory = ruleBase.newStatefulSession();
+
+ final List list = new ArrayList();
+ workingMemory.setGlobal( "results",
+ list );
+
+ workingMemory.insert( new Cheese( "stilton",
+ 10 ) );
+ workingMemory.insert( new Cheese( "brie",
+ 15 ) );
+
+ workingMemory.fireAllRules();
+
+ assertEquals( 1,
+ list.size() );
+
+ assertEquals( 2,
+ ((List) list.get( 0 )).size() );
+ }
+
public void testCollectModify() throws Exception {
// read in the source
final Reader reader = new InputStreamReader( getClass().getResourceAsStream( "test_Collect.drl" ) );
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
index 7e8b54a5575..74a51f95268 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java
@@ -3065,34 +3065,6 @@ public void testContainsInArray() throws Exception {
list.get( 1 ) );
}
- public void testCollectNodeSharing() throws Exception {
- final PackageBuilder builder = new PackageBuilder();
- builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_collectNodeSharing.drl" ) ) );
- final Package pkg = builder.getPackage();
-
- RuleBase ruleBase = getRuleBase();
- ruleBase.addPackage( pkg );
- ruleBase = SerializationHelper.serializeObject(ruleBase);
- final WorkingMemory workingMemory = ruleBase.newStatefulSession();
-
- final List list = new ArrayList();
- workingMemory.setGlobal( "results",
- list );
-
- workingMemory.insert( new Cheese( "stilton",
- 10 ) );
- workingMemory.insert( new Cheese( "brie",
- 15 ) );
-
- workingMemory.fireAllRules();
-
- assertEquals( 1,
- list.size() );
-
- assertEquals( 2,
- ((List) list.get( 0 )).size() );
- }
-
public void testNodeSharingNotExists() throws Exception {
final PackageBuilder builder = new PackageBuilder();
builder.addPackageFromDrl( new InputStreamReader( getClass().getResourceAsStream( "test_nodeSharingNotExists.drl" ) ) );
|
0ace17c659c5e73c9de4003718ba4860bfa3be43
|
restlet-framework-java
|
Initial code for new default HTTP connector and SIP connector.
|
a
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet/src/org/restlet/engine/http/connector/AcceptorTask.java b/modules/org.restlet/src/org/restlet/engine/http/connector/AcceptorTask.java
index 4d9b6e0d2a..5ab1b6022d 100644
--- a/modules/org.restlet/src/org/restlet/engine/http/connector/AcceptorTask.java
+++ b/modules/org.restlet/src/org/restlet/engine/http/connector/AcceptorTask.java
@@ -99,9 +99,8 @@ public void run() {
if ((getHelper().getMaxTotalConnections() == -1)
|| (connectionsCount <= getHelper()
.getMaxTotalConnections())) {
- final Connection<?> connection = getHelper()
- .createConnection(getHelper(),
- client.socket());
+ Connection<?> connection = getHelper().createConnection(
+ getHelper(), client.socket());
connection.open();
getHelper().getConnections().add(connection);
} else {
diff --git a/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java b/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java
index 1a9eb2dc42..200a8b2377 100644
--- a/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java
+++ b/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java
@@ -146,6 +146,10 @@ public void run() {
}
});
}
+
+ if (conn.getState() == ConnectionState.CLOSED) {
+ getHelper().getConnections().remove(conn);
+ }
}
// Control if there are some pending requests that could
|
b12acbcf9e65a80fc88871bb81d2c2b9cc44604c
|
elasticsearch
|
introduce read/writeSharedString while streaming: currently, we treat all strings as shared (either by full equality or identity equality), while almost all the time we know if they should be serialized as shared or not. Add an explicit write/readSharedString and use it where applicable; all other write/readString calls will not treat strings as shared. Relates to #3322
|
c
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java b/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java
index 09c3c659987fd..7b310461c2e30 100644
--- a/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java
+++ b/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java
@@ -32,8 +32,6 @@
/**
* Represents a single item response for an action executed as part of the bulk API. Holds the index/type/id
* of the relevant action, and if it has failed or not (with the failure message incase it failed).
- *
- *
*/
public class BulkItemResponse implements Streamable {
@@ -148,8 +146,7 @@ public String getType() {
return ((IndexResponse) response).getType();
} else if (response instanceof DeleteResponse) {
return ((DeleteResponse) response).getType();
- }
- else if (response instanceof UpdateResponse) {
+ } else if (response instanceof UpdateResponse) {
return ((UpdateResponse) response).getType();
}
return null;
@@ -230,7 +227,7 @@ public static BulkItemResponse readBulkItem(StreamInput in) throws IOException {
@Override
public void readFrom(StreamInput in) throws IOException {
id = in.readVInt();
- opType = in.readString();
+ opType = in.readSharedString();
byte type = in.readByte();
if (type == 0) {
@@ -245,14 +242,15 @@ public void readFrom(StreamInput in) throws IOException {
}
if (in.readBoolean()) {
- failure = new Failure(in.readString(), in.readString(), in.readString(), in.readString());
+ failure = new Failure(in.readSharedString(), in.readSharedString(), in.readString(), in.readString());
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(id);
- out.writeString(opType);
+ out.writeSharedString(opType);
+
if (response == null) {
out.writeByte((byte) 2);
} else {
@@ -269,8 +267,8 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
- out.writeString(failure.getIndex());
- out.writeString(failure.getType());
+ out.writeSharedString(failure.getIndex());
+ out.writeSharedString(failure.getType());
out.writeString(failure.getId());
out.writeString(failure.getMessage());
}
diff --git a/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java b/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java
index c7ed3bb9831f0..6cc6ec20a6fd9 100644
--- a/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java
+++ b/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java
@@ -201,7 +201,7 @@ public VersionType versionType() {
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
- type = in.readString();
+ type = in.readSharedString();
id = in.readString();
routing = in.readOptionalString();
refresh = in.readBoolean();
@@ -212,7 +212,7 @@ public void readFrom(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
- out.writeString(type);
+ out.writeSharedString(type);
out.writeString(id);
out.writeOptionalString(routing());
out.writeBoolean(refresh);
diff --git a/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java b/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java
index 3324af619a7c1..f761346860ef0 100644
--- a/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java
+++ b/src/main/java/org/elasticsearch/action/delete/DeleteResponse.java
@@ -89,9 +89,9 @@ public boolean isNotFound() {
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
- index = in.readString();
+ index = in.readSharedString();
+ type = in.readSharedString();
id = in.readString();
- type = in.readString();
version = in.readLong();
notFound = in.readBoolean();
}
@@ -99,9 +99,9 @@ public void readFrom(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
- out.writeString(index);
+ out.writeSharedString(index);
+ out.writeSharedString(type);
out.writeString(id);
- out.writeString(type);
out.writeLong(version);
out.writeBoolean(notFound);
}
diff --git a/src/main/java/org/elasticsearch/action/get/GetRequest.java b/src/main/java/org/elasticsearch/action/get/GetRequest.java
index f51166e960a94..72e05d3051450 100644
--- a/src/main/java/org/elasticsearch/action/get/GetRequest.java
+++ b/src/main/java/org/elasticsearch/action/get/GetRequest.java
@@ -200,7 +200,7 @@ public GetRequest realtime(Boolean realtime) {
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
- type = in.readString();
+ type = in.readSharedString();
id = in.readString();
routing = in.readOptionalString();
preference = in.readOptionalString();
@@ -223,7 +223,7 @@ public void readFrom(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
- out.writeString(type);
+ out.writeSharedString(type);
out.writeString(id);
out.writeOptionalString(routing);
out.writeOptionalString(preference);
diff --git a/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
index c6ca9dd2ff410..abbf7e8d00bb5 100644
--- a/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
+++ b/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java
@@ -118,14 +118,10 @@ public static Item readItem(StreamInput in) throws IOException {
@Override
public void readFrom(StreamInput in) throws IOException {
- index = in.readString();
- if (in.readBoolean()) {
- type = in.readString();
- }
+ index = in.readSharedString();
+ type = in.readOptionalSharedString();
id = in.readString();
- if (in.readBoolean()) {
- routing = in.readString();
- }
+ routing = in.readOptionalString();
int size = in.readVInt();
if (size > 0) {
fields = new String[size];
@@ -137,20 +133,10 @@ public void readFrom(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeString(index);
- if (type == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- out.writeString(type);
- }
+ out.writeSharedString(index);
+ out.writeOptionalSharedString(type);
out.writeString(id);
- if (routing == null) {
- out.writeBoolean(false);
- } else {
- out.writeBoolean(true);
- out.writeString(routing);
- }
+ out.writeOptionalString(routing);
if (fields == null) {
out.writeVInt(0);
} else {
diff --git a/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java b/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java
index a46c464867e37..72a2e2a7fa1bd 100644
--- a/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java
+++ b/src/main/java/org/elasticsearch/action/get/MultiGetShardRequest.java
@@ -108,7 +108,7 @@ public void readFrom(StreamInput in) throws IOException {
for (int i = 0; i < size; i++) {
locations.add(in.readVInt());
if (in.readBoolean()) {
- types.add(in.readString());
+ types.add(in.readSharedString());
} else {
types.add(null);
}
@@ -145,7 +145,7 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
- out.writeString(types.get(i));
+ out.writeSharedString(types.get(i));
}
out.writeString(ids.get(i));
if (fields.get(i) == null) {
diff --git a/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/src/main/java/org/elasticsearch/action/index/IndexRequest.java
index c4a8dcb9cfe84..b0441fb4e35be 100644
--- a/src/main/java/org/elasticsearch/action/index/IndexRequest.java
+++ b/src/main/java/org/elasticsearch/action/index/IndexRequest.java
@@ -608,7 +608,7 @@ public void process(MetaData metaData, String aliasOrIndex, @Nullable MappingMet
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
- type = in.readString();
+ type = in.readSharedString();
id = in.readOptionalString();
routing = in.readOptionalString();
parent = in.readOptionalString();
@@ -626,7 +626,7 @@ public void readFrom(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
- out.writeString(type);
+ out.writeSharedString(type);
out.writeOptionalString(id);
out.writeOptionalString(routing);
out.writeOptionalString(parent);
diff --git a/src/main/java/org/elasticsearch/action/index/IndexResponse.java b/src/main/java/org/elasticsearch/action/index/IndexResponse.java
index b348d02257b91..1f3d1f3eb67a6 100644
--- a/src/main/java/org/elasticsearch/action/index/IndexResponse.java
+++ b/src/main/java/org/elasticsearch/action/index/IndexResponse.java
@@ -89,9 +89,9 @@ public boolean isCreated() {
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
- index = in.readString();
+ index = in.readSharedString();
+ type = in.readSharedString();
id = in.readString();
- type = in.readString();
version = in.readLong();
created = in.readBoolean();
}
@@ -99,9 +99,9 @@ public void readFrom(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
- out.writeString(index);
+ out.writeSharedString(index);
+ out.writeSharedString(type);
out.writeString(id);
- out.writeString(type);
out.writeLong(version);
out.writeBoolean(created);
}
diff --git a/src/main/java/org/elasticsearch/action/support/replication/ShardReplicationOperationRequest.java b/src/main/java/org/elasticsearch/action/support/replication/ShardReplicationOperationRequest.java
index c050acf5f1455..09ab720483eae 100644
--- a/src/main/java/org/elasticsearch/action/support/replication/ShardReplicationOperationRequest.java
+++ b/src/main/java/org/elasticsearch/action/support/replication/ShardReplicationOperationRequest.java
@@ -161,7 +161,7 @@ public void readFrom(StreamInput in) throws IOException {
replicationType = ReplicationType.fromId(in.readByte());
consistencyLevel = WriteConsistencyLevel.fromId(in.readByte());
timeout = TimeValue.readTimeValue(in);
- index = in.readString();
+ index = in.readSharedString();
// no need to serialize threaded* parameters, since they only matter locally
}
@@ -171,7 +171,7 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeByte(replicationType.id());
out.writeByte(consistencyLevel.id());
timeout.writeTo(out);
- out.writeString(index);
+ out.writeSharedString(index);
}
/**
diff --git a/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
index d0802655c5ef3..a94d4f3c105b6 100644
--- a/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
+++ b/src/main/java/org/elasticsearch/action/update/UpdateRequest.java
@@ -580,7 +580,7 @@ public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
replicationType = ReplicationType.fromId(in.readByte());
consistencyLevel = WriteConsistencyLevel.fromId(in.readByte());
- type = in.readString();
+ type = in.readSharedString();
id = in.readString();
routing = in.readOptionalString();
script = in.readOptionalString();
@@ -615,7 +615,7 @@ public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeByte(replicationType.id());
out.writeByte(consistencyLevel.id());
- out.writeString(type);
+ out.writeSharedString(type);
out.writeString(id);
out.writeOptionalString(routing);
out.writeOptionalString(script);
diff --git a/src/main/java/org/elasticsearch/action/update/UpdateResponse.java b/src/main/java/org/elasticsearch/action/update/UpdateResponse.java
index 215986e697fad..db6cbf10f1bb3 100644
--- a/src/main/java/org/elasticsearch/action/update/UpdateResponse.java
+++ b/src/main/java/org/elasticsearch/action/update/UpdateResponse.java
@@ -96,9 +96,9 @@ public boolean isCreated() {
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
- index = in.readString();
+ index = in.readSharedString();
+ type = in.readSharedString();
id = in.readString();
- type = in.readString();
version = in.readLong();
created = in.readBoolean();
if (in.readBoolean()) {
@@ -109,9 +109,9 @@ public void readFrom(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
- out.writeString(index);
+ out.writeSharedString(index);
+ out.writeSharedString(type);
out.writeString(id);
- out.writeString(type);
out.writeLong(version);
out.writeBoolean(created);
if (getResult == null) {
diff --git a/src/main/java/org/elasticsearch/common/io/stream/AdapterStreamInput.java b/src/main/java/org/elasticsearch/common/io/stream/AdapterStreamInput.java
index 98559a35bd581..f900ac8e6b063 100644
--- a/src/main/java/org/elasticsearch/common/io/stream/AdapterStreamInput.java
+++ b/src/main/java/org/elasticsearch/common/io/stream/AdapterStreamInput.java
@@ -103,6 +103,11 @@ public String readString() throws IOException {
return in.readString();
}
+ @Override
+ public String readSharedString() throws IOException {
+ return in.readSharedString();
+ }
+
@Override
public Text readText() throws IOException {
return in.readText();
diff --git a/src/main/java/org/elasticsearch/common/io/stream/AdapterStreamOutput.java b/src/main/java/org/elasticsearch/common/io/stream/AdapterStreamOutput.java
index b506338e74a88..fa8a0f481b00b 100644
--- a/src/main/java/org/elasticsearch/common/io/stream/AdapterStreamOutput.java
+++ b/src/main/java/org/elasticsearch/common/io/stream/AdapterStreamOutput.java
@@ -131,6 +131,11 @@ public void writeString(String str) throws IOException {
out.writeString(str);
}
+ @Override
+ public void writeSharedString(String str) throws IOException {
+ out.writeSharedString(str);
+ }
+
@Override
public void writeText(Text text) throws IOException {
out.writeText(text);
diff --git a/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamInput.java b/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamInput.java
index 94041d53c512c..4520daed2e2be 100644
--- a/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamInput.java
+++ b/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamInput.java
@@ -30,8 +30,6 @@
public class HandlesStreamInput extends AdapterStreamInput {
private final TIntObjectHashMap<String> handles = new TIntObjectHashMap<String>();
- private final TIntObjectHashMap<String> identityHandles = new TIntObjectHashMap<String>();
-
private final TIntObjectHashMap<Text> handlesText = new TIntObjectHashMap<Text>();
HandlesStreamInput() {
@@ -43,7 +41,7 @@ public HandlesStreamInput(StreamInput in) {
}
@Override
- public String readString() throws IOException {
+ public String readSharedString() throws IOException {
byte b = in.readByte();
if (b == 0) {
// full string with handle
@@ -53,19 +51,16 @@ public String readString() throws IOException {
return s;
} else if (b == 1) {
return handles.get(in.readVInt());
- } else if (b == 2) {
- // full string with handle
- int handle = in.readVInt();
- String s = in.readString();
- identityHandles.put(handle, s);
- return s;
- } else if (b == 3) {
- return identityHandles.get(in.readVInt());
} else {
throw new IOException("Expected handle header, got [" + b + "]");
}
}
+ @Override
+ public String readString() throws IOException {
+ return in.readString();
+ }
+
@Override
public Text readSharedText() throws IOException {
byte b = in.readByte();
@@ -86,21 +81,15 @@ public Text readSharedText() throws IOException {
@Override
public void reset() throws IOException {
super.reset();
- handles.clear();
- identityHandles.clear();
- handlesText.clear();
+ cleanHandles();
}
public void reset(StreamInput in) {
super.reset(in);
- handles.clear();
- identityHandles.clear();
- handlesText.clear();
+ cleanHandles();
}
public void cleanHandles() {
handles.clear();
- identityHandles.clear();
- handlesText.clear();
}
}
diff --git a/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamOutput.java b/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamOutput.java
index 65b1b32d02005..a092e7bd72a58 100644
--- a/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamOutput.java
+++ b/src/main/java/org/elasticsearch/common/io/stream/HandlesStreamOutput.java
@@ -24,91 +24,57 @@
import org.elasticsearch.common.text.Text;
import java.io.IOException;
-import java.util.Arrays;
/**
*
*/
public class HandlesStreamOutput extends AdapterStreamOutput {
- private static final int DEFAULT_IDENTITY_THRESHOLD = 50;
-
- // a threshold above which strings will use identity check
- private final int identityThreshold;
-
private final TObjectIntHashMap<String> handles = new TObjectIntHashMap<String>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1);
- private final HandleTable identityHandles = new HandleTable(10, (float) 3.00);
-
private final TObjectIntHashMap<Text> handlesText = new TObjectIntHashMap<Text>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1);
public HandlesStreamOutput(StreamOutput out) {
- this(out, DEFAULT_IDENTITY_THRESHOLD);
- }
-
- public HandlesStreamOutput(StreamOutput out, int identityThreshold) {
super(out);
- this.identityThreshold = identityThreshold;
}
@Override
- public void writeString(String s) throws IOException {
- if (s.length() < identityThreshold) {
- int handle = handles.get(s);
- if (handle == -1) {
- handle = handles.size();
- handles.put(s, handle);
- out.writeByte((byte) 0);
- out.writeVInt(handle);
- out.writeString(s);
- } else {
- out.writeByte((byte) 1);
- out.writeVInt(handle);
- }
+ public void writeSharedString(String str) throws IOException {
+ int handle = handles.get(str);
+ if (handle == -1) {
+ handle = handles.size();
+ handles.put(str, handle);
+ out.writeByte((byte) 0);
+ out.writeVInt(handle);
+ out.writeString(str);
} else {
- int handle = identityHandles.lookup(s);
- if (handle == -1) {
- handle = identityHandles.assign(s);
- out.writeByte((byte) 2);
- out.writeVInt(handle);
- out.writeString(s);
- } else {
- out.writeByte((byte) 3);
- out.writeVInt(handle);
- }
+ out.writeByte((byte) 1);
+ out.writeVInt(handle);
}
}
+ @Override
+ public void writeString(String s) throws IOException {
+ out.writeString(s);
+ }
+
@Override
public void writeSharedText(Text text) throws IOException {
- int length;
- if (text.hasBytes()) {
- length = text.bytes().length();
- } else {
- length = text.string().length();
- }
- if (length < identityThreshold) {
- int handle = handlesText.get(text);
- if (handle == -1) {
- handle = handlesText.size();
- handlesText.put(text, handle);
- out.writeByte((byte) 0);
- out.writeVInt(handle);
- out.writeText(text);
- } else {
- out.writeByte((byte) 1);
- out.writeVInt(handle);
- }
- } else {
- out.writeByte((byte) 2);
+ int handle = handlesText.get(text);
+ if (handle == -1) {
+ handle = handlesText.size();
+ handlesText.put(text, handle);
+ out.writeByte((byte) 0);
+ out.writeVInt(handle);
out.writeText(text);
+ } else {
+ out.writeByte((byte) 1);
+ out.writeVInt(handle);
}
}
@Override
public void reset() throws IOException {
- handles.clear();
- identityHandles.clear();
- handlesText.clear();
+ clear();
if (out != null) {
out.reset();
}
@@ -116,132 +82,6 @@ public void reset() throws IOException {
public void clear() {
handles.clear();
- identityHandles.clear();
handlesText.clear();
}
-
- /**
- * Lightweight identity hash table which maps objects to integer handles,
- * assigned in ascending order.
- */
- private static class HandleTable {
-
- /* number of mappings in table/next available handle */
- private int size;
- /* size threshold determining when to expand hash spine */
- private int threshold;
- /* factor for computing size threshold */
- private final float loadFactor;
- /* maps hash value -> candidate handle value */
- private int[] spine;
- /* maps handle value -> next candidate handle value */
- private int[] next;
- /* maps handle value -> associated object */
- private Object[] objs;
-
- /**
- * Creates new HandleTable with given capacity and load factor.
- */
- HandleTable(int initialCapacity, float loadFactor) {
- this.loadFactor = loadFactor;
- spine = new int[initialCapacity];
- next = new int[initialCapacity];
- objs = new Object[initialCapacity];
- threshold = (int) (initialCapacity * loadFactor);
- clear();
- }
-
- /**
- * Assigns next available handle to given object, and returns handle
- * value. Handles are assigned in ascending order starting at 0.
- */
- int assign(Object obj) {
- if (size >= next.length) {
- growEntries();
- }
- if (size >= threshold) {
- growSpine();
- }
- insert(obj, size);
- return size++;
- }
-
- /**
- * Looks up and returns handle associated with given object, or -1 if
- * no mapping found.
- */
- int lookup(Object obj) {
- if (size == 0) {
- return -1;
- }
- int index = hash(obj) % spine.length;
- for (int i = spine[index]; i >= 0; i = next[i]) {
- if (objs[i] == obj) {
- return i;
- }
- }
- return -1;
- }
-
- /**
- * Resets table to its initial (empty) state.
- */
- void clear() {
- Arrays.fill(spine, -1);
- Arrays.fill(objs, 0, size, null);
- size = 0;
- }
-
- /**
- * Returns the number of mappings currently in table.
- */
- int size() {
- return size;
- }
-
- /**
- * Inserts mapping object -> handle mapping into table. Assumes table
- * is large enough to accommodate new mapping.
- */
- private void insert(Object obj, int handle) {
- int index = hash(obj) % spine.length;
- objs[handle] = obj;
- next[handle] = spine[index];
- spine[index] = handle;
- }
-
- /**
- * Expands the hash "spine" -- equivalent to increasing the number of
- * buckets in a conventional hash table.
- */
- private void growSpine() {
- spine = new int[(spine.length << 1) + 1];
- threshold = (int) (spine.length * loadFactor);
- Arrays.fill(spine, -1);
- for (int i = 0; i < size; i++) {
- insert(objs[i], i);
- }
- }
-
- /**
- * Increases hash table capacity by lengthening entry arrays.
- */
- private void growEntries() {
- int newLength = (next.length << 1) + 1;
- int[] newNext = new int[newLength];
- System.arraycopy(next, 0, newNext, 0, size);
- next = newNext;
-
- Object[] newObjs = new Object[newLength];
- System.arraycopy(objs, 0, newObjs, 0, size);
- objs = newObjs;
- }
-
- /**
- * Returns hash value for given object.
- */
- private int hash(Object obj) {
- return System.identityHashCode(obj) & 0x7FFFFFFF;
- }
- }
}
diff --git a/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
index f7cbd65de522b..e2303017ca66a 100644
--- a/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
+++ b/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java
@@ -245,6 +245,14 @@ public String readOptionalString() throws IOException {
return null;
}
+ @Nullable
+ public String readOptionalSharedString() throws IOException {
+ if (readBoolean()) {
+ return readSharedString();
+ }
+ return null;
+ }
+
public String readString() throws IOException {
int charCount = readVInt();
char[] chars = CachedStreamInput.getCharArray(charCount);
@@ -274,6 +282,10 @@ public String readString() throws IOException {
return new String(chars, 0, charCount);
}
+ public String readSharedString() throws IOException {
+ return readString();
+ }
+
public final float readFloat() throws IOException {
return Float.intBitsToFloat(readInt());
@@ -384,14 +396,14 @@ public Object readGenericValue() throws IOException {
int size9 = readVInt();
Map map9 = new LinkedHashMap(size9);
for (int i = 0; i < size9; i++) {
- map9.put(readString(), readGenericValue());
+ map9.put(readSharedString(), readGenericValue());
}
return map9;
case 10:
int size10 = readVInt();
Map map10 = new HashMap(size10);
for (int i = 0; i < size10; i++) {
- map10.put(readString(), readGenericValue());
+ map10.put(readSharedString(), readGenericValue());
}
return map10;
case 11:
diff --git a/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
index ff66e149f69df..d754d251f5486 100644
--- a/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
+++ b/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java
@@ -176,6 +176,15 @@ public void writeOptionalString(@Nullable String str) throws IOException {
}
}
+ public void writeOptionalSharedString(@Nullable String str) throws IOException {
+ if (str == null) {
+ writeBoolean(false);
+ } else {
+ writeBoolean(true);
+ writeSharedString(str);
+ }
+ }
+
public void writeOptionalText(@Nullable Text text) throws IOException {
if (text == null) {
writeInt(-1);
@@ -234,6 +243,10 @@ public void writeString(String str) throws IOException {
}
}
+ public void writeSharedString(String str) throws IOException {
+ writeString(str);
+ }
+
public void writeFloat(float v) throws IOException {
writeInt(Float.floatToIntBits(v));
}
@@ -360,7 +373,7 @@ public void writeGenericValue(@Nullable Object value) throws IOException {
Map<String, Object> map = (Map<String, Object>) value;
writeVInt(map.size());
for (Map.Entry<String, Object> entry : map.entrySet()) {
- writeString(entry.getKey());
+ writeSharedString(entry.getKey());
writeGenericValue(entry.getValue());
}
} else if (type == Byte.class) {
diff --git a/src/main/java/org/elasticsearch/index/get/GetResult.java b/src/main/java/org/elasticsearch/index/get/GetResult.java
index fee9e1e873fed..7414eae1320cf 100644
--- a/src/main/java/org/elasticsearch/index/get/GetResult.java
+++ b/src/main/java/org/elasticsearch/index/get/GetResult.java
@@ -46,21 +46,13 @@
public class GetResult implements Streamable, Iterable<GetField>, ToXContent {
private String index;
-
private String type;
-
private String id;
-
private long version;
-
private boolean exists;
-
private Map<String, GetField> fields;
-
private Map<String, Object> sourceAsMap;
-
private BytesReference source;
-
private byte[] sourceAsBytes;
GetResult() {
@@ -275,8 +267,8 @@ public static GetResult readGetResult(StreamInput in) throws IOException {
@Override
public void readFrom(StreamInput in) throws IOException {
- index = in.readString();
- type = in.readOptionalString();
+ index = in.readSharedString();
+ type = in.readOptionalSharedString();
id = in.readString();
version = in.readLong();
exists = in.readBoolean();
@@ -300,8 +292,8 @@ public void readFrom(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeString(index);
- out.writeOptionalString(type);
+ out.writeSharedString(index);
+ out.writeOptionalSharedString(type);
out.writeString(id);
out.writeLong(version);
out.writeBoolean(exists);
diff --git a/src/main/java/org/elasticsearch/search/highlight/HighlightField.java b/src/main/java/org/elasticsearch/search/highlight/HighlightField.java
index 8c28214a3eca7..69ccefd14f1d5 100644
--- a/src/main/java/org/elasticsearch/search/highlight/HighlightField.java
+++ b/src/main/java/org/elasticsearch/search/highlight/HighlightField.java
@@ -86,7 +86,7 @@ public static HighlightField readHighlightField(StreamInput in) throws IOExcepti
@Override
public void readFrom(StreamInput in) throws IOException {
- name = in.readString();
+ name = in.readSharedString();
if (in.readBoolean()) {
int size = in.readVInt();
if (size == 0) {
@@ -102,7 +102,7 @@ public void readFrom(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeString(name);
+ out.writeSharedString(name);
if (fragments == null) {
out.writeBoolean(false);
} else {
diff --git a/src/main/java/org/elasticsearch/search/internal/InternalSearchHitField.java b/src/main/java/org/elasticsearch/search/internal/InternalSearchHitField.java
index 4e2a486248259..114593c094fd0 100644
--- a/src/main/java/org/elasticsearch/search/internal/InternalSearchHitField.java
+++ b/src/main/java/org/elasticsearch/search/internal/InternalSearchHitField.java
@@ -91,7 +91,7 @@ public static InternalSearchHitField readSearchHitField(StreamInput in) throws I
@Override
public void readFrom(StreamInput in) throws IOException {
- name = in.readString();
+ name = in.readSharedString();
int size = in.readVInt();
values = new ArrayList<Object>(size);
for (int i = 0; i < size; i++) {
@@ -101,7 +101,7 @@ public void readFrom(StreamInput in) throws IOException {
@Override
public void writeTo(StreamOutput out) throws IOException {
- out.writeString(name);
+ out.writeSharedString(name);
out.writeVInt(values.size());
for (Object value : values) {
out.writeGenericValue(value);
diff --git a/src/test/java/org/elasticsearch/test/unit/common/io/StreamsTests.java b/src/test/java/org/elasticsearch/test/unit/common/io/StreamsTests.java
index 8f9b05a2f7608..21ba689c3012a 100644
--- a/src/test/java/org/elasticsearch/test/unit/common/io/StreamsTests.java
+++ b/src/test/java/org/elasticsearch/test/unit/common/io/StreamsTests.java
@@ -19,27 +19,18 @@
package org.elasticsearch.test.unit.common.io;
-import static org.elasticsearch.common.io.Streams.copy;
-import static org.elasticsearch.common.io.Streams.copyToByteArray;
-import static org.elasticsearch.common.io.Streams.copyToString;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.equalTo;
+import com.google.common.base.Charsets;
+import org.testng.annotations.Test;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.StringReader;
-import java.io.StringWriter;
+import java.io.*;
import java.util.Arrays;
-import org.testng.annotations.Test;
-
-import com.google.common.base.Charsets;
+import static org.elasticsearch.common.io.Streams.*;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
/**
* Unit tests for {@link org.elasticsearch.common.io.Streams}.
- *
- *
*/
public class StreamsTests {
diff --git a/src/test/java/org/elasticsearch/test/unit/common/io/streams/HandlesStreamsTests.java b/src/test/java/org/elasticsearch/test/unit/common/io/streams/HandlesStreamsTests.java
index 7af738a7da10e..f1aa52c946886 100644
--- a/src/test/java/org/elasticsearch/test/unit/common/io/streams/HandlesStreamsTests.java
+++ b/src/test/java/org/elasticsearch/test/unit/common/io/streams/HandlesStreamsTests.java
@@ -26,7 +26,7 @@
import org.testng.annotations.Test;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.*;
/**
*
@@ -35,24 +35,46 @@
public class HandlesStreamsTests {
@Test
- public void testSharedUTFHandles() throws Exception {
- BytesStreamOutput bytesOut = new BytesStreamOutput();
- HandlesStreamOutput out = new HandlesStreamOutput(bytesOut, 5);
- String lowerThresholdValue = "test";
- String higherThresholdValue = "something that is higher than 5";
- out.writeString(lowerThresholdValue);
- out.writeString(higherThresholdValue);
- out.writeInt(1);
- out.writeString("else");
- out.writeString(higherThresholdValue);
- out.writeString(lowerThresholdValue);
-
- HandlesStreamInput in = new HandlesStreamInput(new BytesStreamInput(bytesOut.bytes().toBytes(), false));
- assertThat(in.readString(), equalTo(lowerThresholdValue));
- assertThat(in.readString(), equalTo(higherThresholdValue));
- assertThat(in.readInt(), equalTo(1));
- assertThat(in.readString(), equalTo("else"));
- assertThat(in.readString(), equalTo(higherThresholdValue));
- assertThat(in.readString(), equalTo(lowerThresholdValue));
+ public void testSharedStringHandles() throws Exception {
+ String test1 = "test1";
+ String test2 = "test2";
+ String test3 = "test3";
+ String test4 = "test4";
+ String test5 = "test5";
+ String test6 = "test6";
+
+ BytesStreamOutput bout = new BytesStreamOutput();
+ HandlesStreamOutput out = new HandlesStreamOutput(bout);
+ out.writeString(test1);
+ out.writeString(test1);
+ out.writeString(test2);
+ out.writeString(test3);
+ out.writeSharedString(test4);
+ out.writeSharedString(test4);
+ out.writeSharedString(test5);
+ out.writeSharedString(test6);
+
+ BytesStreamInput bin = new BytesStreamInput(bout.bytes());
+ HandlesStreamInput in = new HandlesStreamInput(bin);
+ String s1 = in.readString();
+ String s2 = in.readString();
+ String s3 = in.readString();
+ String s4 = in.readString();
+ String s5 = in.readSharedString();
+ String s6 = in.readSharedString();
+ String s7 = in.readSharedString();
+ String s8 = in.readSharedString();
+
+ assertThat(s1, equalTo(test1));
+ assertThat(s2, equalTo(test1));
+ assertThat(s3, equalTo(test2));
+ assertThat(s4, equalTo(test3));
+ assertThat(s5, equalTo(test4));
+ assertThat(s6, equalTo(test4));
+ assertThat(s7, equalTo(test5));
+ assertThat(s8, equalTo(test6));
+
+ assertThat(s1, not(sameInstance(s2)));
+ assertThat(s5, sameInstance(s6));
}
}
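
To illustrate the serialization scheme this commit settles on, here is a minimal, self-contained sketch of handle-based shared strings. The Writer/Reader classes below are hypothetical stand-ins, not the elasticsearch StreamOutput/StreamInput API, and they use fixed-width ints where the real code uses variable-length encoding. The first occurrence of a shared string is written in full together with a handle; every later occurrence is written as a back-reference to that handle, while plain (non-shared) strings bypass the handle table entirely.

import java.io.*;
import java.util.*;

// Sketch of handle-based "shared string" serialization (hypothetical API).
public class SharedStringSketch {

    static final class Writer {
        private final Map<String, Integer> handles = new HashMap<>();
        private final DataOutputStream out;

        Writer(OutputStream os) { this.out = new DataOutputStream(os); }

        void writeSharedString(String s) throws IOException {
            Integer handle = handles.get(s);
            if (handle == null) {
                handle = handles.size();
                handles.put(s, handle);
                out.writeByte(0);          // full string with handle follows
                out.writeInt(handle);
                out.writeUTF(s);
            } else {
                out.writeByte(1);          // back-reference only
                out.writeInt(handle);
            }
        }

        void writeString(String s) throws IOException {
            out.writeUTF(s);               // plain string, never de-duplicated
        }
    }

    static final class Reader {
        private final Map<Integer, String> handles = new HashMap<>();
        private final DataInputStream in;

        Reader(InputStream is) { this.in = new DataInputStream(is); }

        String readSharedString() throws IOException {
            int marker = in.readByte();
            if (marker == 0) {
                int handle = in.readInt();
                String s = in.readUTF();
                handles.put(handle, s);    // remember for later back-references
                return s;
            } else if (marker == 1) {
                return handles.get(in.readInt());
            }
            throw new IOException("Expected handle header, got [" + marker + "]");
        }

        String readString() throws IOException {
            return in.readUTF();
        }
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        Writer w = new Writer(buffer);
        w.writeSharedString("my-index");   // full string + handle 0
        w.writeSharedString("my-index");   // a few bytes: back-reference to handle 0
        w.writeString("doc-42");           // plain string, written verbatim

        Reader r = new Reader(new ByteArrayInputStream(buffer.toByteArray()));
        System.out.println(r.readSharedString()); // my-index
        System.out.println(r.readSharedString()); // my-index (resolved via handle)
        System.out.println(r.readString());       // doc-42
    }
}

Repeated values such as index and type names are where this pays off: they are written once per stream and referenced cheaply afterwards, without any length-based heuristic or identity hashing.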
|
4d18b52cfa95619f1f582735f6aa187279bf6fd3
|
drools
|
JBRULES-1268 Activation.remove() is removing the WRONG activation when used in events - the queue is set to null after dequeue (git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@15781 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70)
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/common/AgendaItem.java b/drools-core/src/main/java/org/drools/common/AgendaItem.java
index ff1c6e3a4e3..2bd5d0ea4cf 100644
--- a/drools-core/src/main/java/org/drools/common/AgendaItem.java
+++ b/drools-core/src/main/java/org/drools/common/AgendaItem.java
@@ -222,6 +222,7 @@ public void dequeue() {
if ( this.queue != null ) {
// will only be null if the AgendaGroup is locked when the activation add was attempted
this.queue.dequeue( this.index );
+ this.queue = null;
}
this.activated = false;
}
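
A minimal sketch of the bug this one-line fix addresses, using plain Java collections rather than the Drools agenda classes (all names below are hypothetical): an item that keeps a stale reference to its queue after being dequeued can, on a second dequeue() call, remove whatever element now occupies its old index; nulling the reference turns the second call into a no-op.

import java.util.ArrayList;
import java.util.List;

// Sketch of "dequeue once" protection via nulling the queue reference.
public class DequeueOnceSketch {

    static final class Item {
        private List<Item> queue;
        private int index;

        void enqueue(List<Item> q) {
            this.queue = q;
            this.index = q.size();
            q.add(this);
        }

        void dequeue() {
            if (queue != null) {
                queue.remove(index);  // only valid while we are actually queued
                queue = null;         // the fix: forget the queue afterwards
            }
        }
    }

    public static void main(String[] args) {
        List<Item> queue = new ArrayList<>();
        Item first = new Item();
        Item second = new Item();
        first.enqueue(queue);
        second.enqueue(queue);

        first.dequeue();  // removes "first"
        first.dequeue();  // no-op thanks to the null check; without it, "second" would be removed
        System.out.println(queue.size()); // 1: "second" is still queued
    }
}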
|
f95ec3f5bf12bee07c90943cff3b135e6a7e7a8b
|
hadoop
|
HADOOP-6133. Add a caching layer to Configuration::getClassByName to alleviate a performance regression introduced in a compatibility layer. Contributed by Todd Lipcon (git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@812285 13f79535-47bb-0310-9956-ffa450edef68)
|
c
|
https://github.com/apache/hadoop
|
diff --git a/CHANGES.txt b/CHANGES.txt
index b3e5b2e07573c..a32e6c7905614 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -512,7 +512,8 @@ Trunk (unreleased changes)
HADOOP-6176. Add a couple package private methods to AccessTokenHandler
for testing. (Kan Zhang via szetszwo)
- HADOOP-6182. Fix ReleaseAudit warnings (Giridharan Kesavan and Lee Tucker via gkesavan)
+ HADOOP-6182. Fix ReleaseAudit warnings (Giridharan Kesavan and Lee Tucker
+ via gkesavan)
HADOOP-6173. Change src/native/packageNativeHadoop.sh to package all
native library files. (Hong Tang via szetszwo)
@@ -526,6 +527,10 @@ Trunk (unreleased changes)
HADOOP-6231. Allow caching of filesystem instances to be disabled on a
per-instance basis. (tomwhite)
+ HADOOP-6133. Add a caching layer to Configuration::getClassByName to
+ alleviate a performance regression introduced in a compatibility layer.
+ (Todd Lipcon via cdouglas)
+
OPTIMIZATIONS
HADOOP-5595. NameNode does not need to run a replicator to choose a
diff --git a/src/java/org/apache/hadoop/conf/Configuration.java b/src/java/org/apache/hadoop/conf/Configuration.java
index cfb1ba8d70a54..8bf4c1c436eb2 100644
--- a/src/java/org/apache/hadoop/conf/Configuration.java
+++ b/src/java/org/apache/hadoop/conf/Configuration.java
@@ -170,6 +170,9 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
*/
private static final ArrayList<String> defaultResources =
new ArrayList<String>();
+
+ private static final Map<ClassLoader, Map<String, Class<?>>>
+ CACHE_CLASSES = new WeakHashMap<ClassLoader, Map<String, Class<?>>>();
/**
* Flag to indicate if the storage of resource which updates a key needs
@@ -1029,7 +1032,27 @@ public void setStrings(String name, String... values) {
* @throws ClassNotFoundException if the class is not found.
*/
public Class<?> getClassByName(String name) throws ClassNotFoundException {
- return Class.forName(name, true, classLoader);
+ Map<String, Class<?>> map;
+
+ synchronized (CACHE_CLASSES) {
+ map = CACHE_CLASSES.get(classLoader);
+ if (map == null) {
+ map = Collections.synchronizedMap(
+ new WeakHashMap<String, Class<?>>());
+ CACHE_CLASSES.put(classLoader, map);
+ }
+ }
+
+ Class clazz = map.get(name);
+ if (clazz == null) {
+ clazz = Class.forName(name, true, classLoader);
+ if (clazz != null) {
+ // two putters can race here, but they'll put the same class
+ map.put(name, clazz);
+ }
+ }
+
+ return clazz;
}
/**
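
The caching pattern is easiest to see lifted out of Configuration: a two-level cache keyed first by ClassLoader and then by class name, with weak references at both levels so the cache never pins loaders or classes in memory. The sketch below is a standalone illustration under those assumptions, not Hadoop's actual class.

import java.util.*;

// Sketch of a per-ClassLoader class-by-name cache (hypothetical class name).
public final class ClassByNameCache {

    private static final Map<ClassLoader, Map<String, Class<?>>> CACHE =
            new WeakHashMap<>();

    public static Class<?> getClassByName(String name, ClassLoader loader)
            throws ClassNotFoundException {
        Map<String, Class<?>> map;
        synchronized (CACHE) {
            map = CACHE.get(loader);
            if (map == null) {
                map = Collections.synchronizedMap(new WeakHashMap<>());
                CACHE.put(loader, map);
            }
        }
        Class<?> clazz = map.get(name);
        if (clazz == null) {
            clazz = Class.forName(name, true, loader);
            map.put(name, clazz);  // two putters can race here, but they store the same class
        }
        return clazz;
    }

    public static void main(String[] args) throws ClassNotFoundException {
        ClassLoader loader = ClassByNameCache.class.getClassLoader();
        Class<?> first = getClassByName("java.util.ArrayList", loader);
        Class<?> second = getClassByName("java.util.ArrayList", loader);
        System.out.println(first == second); // true: second lookup is served from the cache
    }
}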
|
24a996801b15e26ee749194977b5995d7b00c485
|
camel
|
CAMEL-2445 applied patch with thanks to Stan and Jeff (git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@906342 13f79535-47bb-0310-9956-ffa450edef68)
|
c
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/processor/BatchProcessor.java b/camel-core/src/main/java/org/apache/camel/processor/BatchProcessor.java
index 0f7e42c4e4c3d..e780e2a7f942e 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/BatchProcessor.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/BatchProcessor.java
@@ -27,6 +27,7 @@
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
+import org.apache.camel.CamelException;
import org.apache.camel.Exchange;
import org.apache.camel.Navigate;
import org.apache.camel.Processor;
@@ -297,8 +298,12 @@ public void run() {
try {
try {
sendExchanges();
- } catch (Exception e) {
- getExceptionHandler().handleException(e);
+ } catch (Throwable t) {
+ if (t instanceof Exception) {
+ getExceptionHandler().handleException(t);
+ } else {
+ getExceptionHandler().handleException(new CamelException(t));
+ }
}
} finally {
queueLock.lock();
diff --git a/camel-core/src/test/java/org/apache/camel/processor/aggregator/AggregatorExceptionTest.java b/camel-core/src/test/java/org/apache/camel/processor/aggregator/AggregatorExceptionTest.java
new file mode 100644
index 0000000000000..1bea0a7436392
--- /dev/null
+++ b/camel-core/src/test/java/org/apache/camel/processor/aggregator/AggregatorExceptionTest.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.processor.aggregator;
+
+import org.apache.camel.ContextTestSupport;
+import org.apache.camel.Exchange;
+import org.apache.camel.Processor;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.component.mock.MockEndpoint;
+
+public class AggregatorExceptionTest extends ContextTestSupport {
+
+ public void testAggregateAndOnException() throws Exception {
+ MockEndpoint mock = getMockEndpoint("mock:error");
+
+ // can change this to 5 when BatchProcessor's exception handling works properly
+ mock.expectedMessageCount(0);
+
+ for (int c = 0; c <= 10; c++) {
+ template.sendBodyAndHeader("seda:start", "Hi!", "id", 123);
+ }
+ assertMockEndpointsSatisfied();
+ }
+
+ @Override
+ protected RouteBuilder createRouteBuilder() throws Exception {
+ return new RouteBuilder() {
+ @Override
+ public void configure() throws Exception {
+
+ final String exceptionString = "This is an Error not an Exception";
+
+ //errorHandler(deadLetterChannel("mock:error"));
+ onException(Throwable.class).handled(true).to("mock:error");
+
+ from("seda:start")
+ .aggregate(header("id"))
+ .batchSize(2)
+ .process(new Processor() {
+ public void process(Exchange exchange) throws Exception {
+ throw new java.lang.NoSuchMethodError(exceptionString);
+ }
+ });
+ }
+ };
+ }
+}
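
The essence of the patch is catching Throwable rather than Exception in the batch-sending thread, so that Errors such as NoSuchMethodError thrown by user processors still reach the exception handler instead of silently killing the thread. A minimal standalone sketch of that pattern follows; the names are hypothetical, and a plain RuntimeException wrapper stands in where the real patch uses CamelException.

// Sketch of routing Errors from a worker task into an Exception-based handler.
public class CatchThrowableSketch {

    interface ExceptionHandler {
        void handleException(Exception e);
    }

    static void runBatch(Runnable sendExchanges, ExceptionHandler handler) {
        try {
            sendExchanges.run();
        } catch (Throwable t) {
            if (t instanceof Exception) {
                handler.handleException((Exception) t);
            } else {
                // wrap Errors and other Throwables so the handler can see them
                handler.handleException(new RuntimeException(t));
            }
        }
    }

    public static void main(String[] args) {
        ExceptionHandler handler = e -> System.out.println("handled: " + e);
        runBatch(() -> { throw new NoSuchMethodError("boom"); }, handler);
        System.out.println("processing continues after the Error");
    }
}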
|
e83ec1017bf61d352ef8e872984a79002c2020b0
|
kotlin
|
Reflection support--
|
a
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/backend-common/src/org/jetbrains/kotlin/backend/common/CodegenUtil.java b/compiler/backend-common/src/org/jetbrains/kotlin/backend/common/CodegenUtil.java
index 2599c11fa137b..4b5dc88d1c932 100644
--- a/compiler/backend-common/src/org/jetbrains/kotlin/backend/common/CodegenUtil.java
+++ b/compiler/backend-common/src/org/jetbrains/kotlin/backend/common/CodegenUtil.java
@@ -19,6 +19,7 @@
import com.intellij.openapi.editor.Document;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
+import kotlin.KotlinPackage;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.kotlin.backend.common.bridges.BridgesPackage;
diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/ClassFileFactory.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/ClassFileFactory.java
index 181981b3462aa..ab8c235196e05 100644
--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/ClassFileFactory.java
+++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/ClassFileFactory.java
@@ -38,6 +38,8 @@
import java.io.UnsupportedEncodingException;
import java.util.*;
+import static org.jetbrains.kotlin.codegen.JvmCodegenUtil.getMappingFileName;
+
public class ClassFileFactory implements OutputFileCollection {
private final GenerationState state;
private final ClassBuilderFactory builderFactory;
@@ -86,7 +88,7 @@ void done() {
private void writeModuleMappings(Collection<PackageCodegen> values) {
String moduleName = KotlinPackage.removeSurrounding(state.getModule().getName().asString(), "<", ">");
- String outputFilePath = "META-INF/" + moduleName + ".kotlin_module";
+ String outputFilePath = getMappingFileName(moduleName);
final StringWriter moduleMapping = new StringWriter(1024);
for (PackageCodegen codegen : values) {
codegen.getFacades().serialize(moduleMapping);
diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/GeneratedClassLoader.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/GeneratedClassLoader.java
index 14282bd82ba77..3b346ef7f1336 100644
--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/GeneratedClassLoader.java
+++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/GeneratedClassLoader.java
@@ -19,6 +19,8 @@
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.backend.common.output.OutputFile;
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.List;
@@ -32,6 +34,15 @@ public GeneratedClassLoader(@NotNull ClassFileFactory factory, ClassLoader paren
this.factory = factory;
}
+ @Override
+ public InputStream getResourceAsStream(String name) {
+ OutputFile outputFile = factory.get(name);
+ if (outputFile != null) {
+ return new ByteArrayInputStream(outputFile.asByteArray());
+ }
+ return super.getResourceAsStream(name);
+ }
+
@NotNull
@Override
protected Class<?> findClass(@NotNull String name) throws ClassNotFoundException {
diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/JvmCodegenUtil.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/JvmCodegenUtil.java
index 2a2e0a5a521a0..7297aa1c4753c 100644
--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/JvmCodegenUtil.java
+++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/JvmCodegenUtil.java
@@ -245,4 +245,13 @@ public static boolean shouldUseJavaClassForClassLiteral(@NotNull ClassifierDescr
module == module.getBuiltIns().getBuiltInsModule() ||
DescriptorUtils.isAnnotationClass(descriptor);
}
+
+ public static String getModuleName(ModuleDescriptor module) {
+ return KotlinPackage.removeSurrounding(module.getName().asString(), "<", ">");
+ }
+
+ @NotNull
+ public static String getMappingFileName(@NotNull String moduleName) {
+ return "META-INF/" + moduleName + ".kotlin_module";
+ }
}
diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/MemberCodegen.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/MemberCodegen.java
index 8d04c8bd80f97..80cadd4c5437e 100644
--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/MemberCodegen.java
+++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/MemberCodegen.java
@@ -467,6 +467,9 @@ public static void generateReflectionObjectField(
if (state.getClassBuilderMode() == ClassBuilderMode.LIGHT_CLASSES) return;
v.aconst(thisAsmType);
+ if (factory.getArgumentTypes().length == 2) {
+ v.aconst(JvmCodegenUtil.getModuleName(state.getModule()));
+ }
v.invokestatic(REFLECTION, factory.getName(), factory.getDescriptor(), false);
v.putstatic(thisAsmType.getInternalName(), fieldName, type);
}
diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/PackageCodegen.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/PackageCodegen.java
index 4f4998b699ff6..1ca57d90a746b 100644
--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/PackageCodegen.java
+++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/PackageCodegen.java
@@ -260,7 +260,7 @@ private void generatePackageFacadeClass(
private void generateKotlinPackageReflectionField() {
MethodVisitor mv = v.newMethod(NO_ORIGIN, ACC_STATIC, "<clinit>", "()V", null, null);
- Method method = method("createKotlinPackage", K_PACKAGE_TYPE, getType(Class.class));
+ Method method = method("createKotlinPackage", K_PACKAGE_TYPE, getType(Class.class), getType(String.class));
InstructionAdapter iv = new InstructionAdapter(mv);
MemberCodegen.generateReflectionObjectField(state, packageClassType, v, method, JvmAbi.KOTLIN_PACKAGE_FIELD_NAME, iv);
iv.areturn(Type.VOID_TYPE);
diff --git a/compiler/tests/org/jetbrains/kotlin/jvm/runtime/AbstractJvmRuntimeDescriptorLoaderTest.kt b/compiler/tests/org/jetbrains/kotlin/jvm/runtime/AbstractJvmRuntimeDescriptorLoaderTest.kt
index 1672b1fbb92a8..a0f50d4390ce6 100644
--- a/compiler/tests/org/jetbrains/kotlin/jvm/runtime/AbstractJvmRuntimeDescriptorLoaderTest.kt
+++ b/compiler/tests/org/jetbrains/kotlin/jvm/runtime/AbstractJvmRuntimeDescriptorLoaderTest.kt
@@ -82,7 +82,7 @@ public abstract class AbstractJvmRuntimeDescriptorLoaderTest : TestCaseWithTmpdi
val classLoader = URLClassLoader(arrayOf(tmpdir.toURI().toURL()), ForTestCompileRuntime.runtimeAndReflectJarClassLoader())
- val actual = createReflectedPackageView(classLoader)
+ val actual = createReflectedPackageView(classLoader, "")
val expected = LoadDescriptorUtil.loadTestPackageAndBindingContextFromJavaRoot(
tmpdir, getTestRootDisposable(), jdkKind, ConfigurationKind.ALL, true
@@ -129,8 +129,8 @@ public abstract class AbstractJvmRuntimeDescriptorLoaderTest : TestCaseWithTmpdi
}
}
- private fun createReflectedPackageView(classLoader: URLClassLoader): SyntheticPackageViewForTest {
- val module = RuntimeModuleData.create(classLoader).module
+ private fun createReflectedPackageView(classLoader: URLClassLoader, moduleName: String): SyntheticPackageViewForTest {
+ val module = RuntimeModuleData.create(classLoader, moduleName).module
val generatedPackageDir = File(tmpdir, LoadDescriptorUtil.TEST_PACKAGE_FQNAME.pathSegments().single().asString())
diff --git a/core/descriptors.runtime/src/org/jetbrains/kotlin/load/kotlin/reflect/RuntimeModuleData.kt b/core/descriptors.runtime/src/org/jetbrains/kotlin/load/kotlin/reflect/RuntimeModuleData.kt
index d38a8485da684..6f68484c5a14a 100644
--- a/core/descriptors.runtime/src/org/jetbrains/kotlin/load/kotlin/reflect/RuntimeModuleData.kt
+++ b/core/descriptors.runtime/src/org/jetbrains/kotlin/load/kotlin/reflect/RuntimeModuleData.kt
@@ -44,7 +44,7 @@ public class RuntimeModuleData private constructor(public val deserialization: D
public val localClassResolver: LocalClassResolver get() = deserialization.localClassResolver
companion object {
- public fun create(classLoader: ClassLoader): RuntimeModuleData {
+ public fun create(classLoader: ClassLoader, moduleName: String?): RuntimeModuleData {
val storageManager = LockBasedStorageManager()
val module = ModuleDescriptorImpl(Name.special("<runtime module for $classLoader>"), storageManager,
ModuleParameters(listOf(), JavaToKotlinClassMap.INSTANCE))
@@ -57,8 +57,10 @@ public class RuntimeModuleData private constructor(public val deserialization: D
ExternalAnnotationResolver.EMPTY, ExternalSignatureResolver.DO_NOTHING, RuntimeErrorReporter, JavaResolverCache.EMPTY,
JavaPropertyInitializerEvaluator.DoNothing, SamConversionResolver, RuntimeSourceElementFactory, singleModuleClassResolver
)
+ println("moduleName $moduleName")
val lazyJavaPackageFragmentProvider =
- LazyJavaPackageFragmentProvider(globalJavaResolverContext, module, ReflectionTypes(module), PackageMappingProvider.EMPTY)
+ LazyJavaPackageFragmentProvider(globalJavaResolverContext, module, ReflectionTypes(module),
+ if (moduleName == null) PackageMappingProvider.EMPTY else RuntimePackageMappingProvider(moduleName, classLoader))
val javaDescriptorResolver = JavaDescriptorResolver(lazyJavaPackageFragmentProvider, module)
val javaClassDataFinder = JavaClassDataFinder(reflectKotlinClassFinder, deserializedDescriptorResolver)
val binaryClassAnnotationAndConstantLoader = BinaryClassAnnotationAndConstantLoaderImpl(module, storageManager, reflectKotlinClassFinder, RuntimeErrorReporter)
diff --git a/core/descriptors.runtime/src/org/jetbrains/kotlin/load/kotlin/reflect/RuntimePackageMappingProvider.kt b/core/descriptors.runtime/src/org/jetbrains/kotlin/load/kotlin/reflect/RuntimePackageMappingProvider.kt
new file mode 100644
index 0000000000000..867163bca17ef
--- /dev/null
+++ b/core/descriptors.runtime/src/org/jetbrains/kotlin/load/kotlin/reflect/RuntimePackageMappingProvider.kt
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2010-2015 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.jetbrains.kotlin.load.kotlin.reflect
+
+import org.jetbrains.kotlin.load.java.lazy.PackageMappingProvider
+import org.jetbrains.kotlin.load.kotlin.ModuleMapping
+import org.jetbrains.kotlin.load.kotlin.PackageFacades
+import java.io.ByteArrayOutputStream
+
+class RuntimePackageMappingProvider(val moduleName: String, val classLoader : ClassLoader) : PackageMappingProvider {
+
+ val mapping: ModuleMapping by lazy {
+ print("finding metainf for $moduleName")
+ val resourceAsStream = classLoader.getResourceAsStream("META-INF/$moduleName.kotlin_module") ?: return@lazy ModuleMapping("")
+
+ print("OK")
+
+ try {
+ val out = ByteArrayOutputStream(4096);
+ val buffer = ByteArray(4096);
+ while (true) {
+ val r = resourceAsStream.read(buffer);
+ if (r == -1) break;
+ out.write(buffer, 0, r);
+ }
+
+ val ret = out.toByteArray();
+ return@lazy ModuleMapping(String(ret, "UTF-8"))
+ } finally {
+ resourceAsStream.close()
+ }
+ }
+
+ override fun findPackageMembers(packageName: String): List<String> {
+ print("finding $packageName")
+ return mapping.package2MiniFacades.getOrElse (packageName, { PackageFacades("default") }).parts.toList()
+ }
+}
\ No newline at end of file
diff --git a/core/reflection.jvm/src/kotlin/reflect/jvm/internal/KDeclarationContainerImpl.kt b/core/reflection.jvm/src/kotlin/reflect/jvm/internal/KDeclarationContainerImpl.kt
index d9a37fc547e51..287a51a57e913 100644
--- a/core/reflection.jvm/src/kotlin/reflect/jvm/internal/KDeclarationContainerImpl.kt
+++ b/core/reflection.jvm/src/kotlin/reflect/jvm/internal/KDeclarationContainerImpl.kt
@@ -39,7 +39,7 @@ import kotlin.reflect.KotlinReflectionInternalError
abstract class KDeclarationContainerImpl : ClassBasedDeclarationContainer {
// Note: this is stored here on a soft reference to prevent GC from destroying the weak reference to it in the moduleByClassLoader cache
val moduleData by ReflectProperties.lazySoft {
- jClass.getOrCreateModule()
+ jClass.getOrCreateModule(null)
}
abstract val constructorDescriptors: Collection<ConstructorDescriptor>
diff --git a/core/reflection.jvm/src/kotlin/reflect/jvm/internal/KPackageImpl.kt b/core/reflection.jvm/src/kotlin/reflect/jvm/internal/KPackageImpl.kt
index 33c507d8b9cdf..3e27cc9081ded 100644
--- a/core/reflection.jvm/src/kotlin/reflect/jvm/internal/KPackageImpl.kt
+++ b/core/reflection.jvm/src/kotlin/reflect/jvm/internal/KPackageImpl.kt
@@ -27,9 +27,9 @@ import kotlin.jvm.internal.KotlinPackage
import kotlin.reflect.KCallable
import kotlin.reflect.KPackage
-class KPackageImpl(override val jClass: Class<*>) : KDeclarationContainerImpl(), KPackage {
+class KPackageImpl(override val jClass: Class<*>, val moduleName: String) : KDeclarationContainerImpl(), KPackage {
val descriptor by ReflectProperties.lazySoft {
- val moduleData = jClass.getOrCreateModule()
+ val moduleData = jClass.getOrCreateModule(moduleName)
val fqName = jClass.classId.getPackageFqName()
moduleData.module.getPackage(fqName)
diff --git a/core/reflection.jvm/src/kotlin/reflect/jvm/internal/ReflectionFactoryImpl.java b/core/reflection.jvm/src/kotlin/reflect/jvm/internal/ReflectionFactoryImpl.java
index 27f730367ef7b..b3a7eff722085 100644
--- a/core/reflection.jvm/src/kotlin/reflect/jvm/internal/ReflectionFactoryImpl.java
+++ b/core/reflection.jvm/src/kotlin/reflect/jvm/internal/ReflectionFactoryImpl.java
@@ -32,7 +32,12 @@ public KClass createKotlinClass(Class javaClass) {
@Override
public KPackage createKotlinPackage(Class javaClass) {
- return new KPackageImpl(javaClass);
+ return createKotlinPackage(javaClass, "undefined");
+ }
+
+ @Override
+ public KPackage createKotlinPackage(Class javaClass, String moduleName) {
+ return new KPackageImpl(javaClass, moduleName);
}
@Override
diff --git a/core/reflection.jvm/src/kotlin/reflect/jvm/internal/moduleByClassLoader.kt b/core/reflection.jvm/src/kotlin/reflect/jvm/internal/moduleByClassLoader.kt
index e4fb26a3e9435..5d274af7c2544 100644
--- a/core/reflection.jvm/src/kotlin/reflect/jvm/internal/moduleByClassLoader.kt
+++ b/core/reflection.jvm/src/kotlin/reflect/jvm/internal/moduleByClassLoader.kt
@@ -44,7 +44,7 @@ private class WeakClassLoaderBox(classLoader: ClassLoader) {
ref.get()?.let { it.toString() } ?: "<null>"
}
-internal fun Class<*>.getOrCreateModule(): RuntimeModuleData {
+internal fun Class<*>.getOrCreateModule(moduleName: String?): RuntimeModuleData {
val classLoader = this.safeClassLoader
val key = WeakClassLoaderBox(classLoader)
@@ -54,7 +54,7 @@ internal fun Class<*>.getOrCreateModule(): RuntimeModuleData {
moduleByClassLoader.remove(key, cached)
}
- val module = RuntimeModuleData.create(classLoader)
+ val module = RuntimeModuleData.create(classLoader, moduleName)
try {
while (true) {
val ref = moduleByClassLoader.putIfAbsent(key, WeakReference(module))
diff --git a/core/reflection.jvm/src/kotlin/reflect/jvm/mapping.kt b/core/reflection.jvm/src/kotlin/reflect/jvm/mapping.kt
index 765e23fb2537e..4b8d90d7a3af6 100644
--- a/core/reflection.jvm/src/kotlin/reflect/jvm/mapping.kt
+++ b/core/reflection.jvm/src/kotlin/reflect/jvm/mapping.kt
@@ -94,7 +94,7 @@ public val KType.javaType: Type
*/
public val Class<*>.kotlinPackage: KPackage?
get() = if (getSimpleName().endsWith("Package") &&
- getAnnotation(javaClass<kotlin.jvm.internal.KotlinPackage>()) != null) KPackageImpl(this) else null
+ getAnnotation(javaClass<kotlin.jvm.internal.KotlinPackage>()) != null) KPackageImpl(this, "undefined") else null
/**
diff --git a/core/runtime.jvm/src/kotlin/jvm/internal/Reflection.java b/core/runtime.jvm/src/kotlin/jvm/internal/Reflection.java
index 26bfe9fa704bd..4cdf824e9d02f 100644
--- a/core/runtime.jvm/src/kotlin/jvm/internal/Reflection.java
+++ b/core/runtime.jvm/src/kotlin/jvm/internal/Reflection.java
@@ -55,6 +55,10 @@ public static KPackage createKotlinPackage(Class javaClass) {
return factory.createKotlinPackage(javaClass);
}
+ public static KPackage createKotlinPackage(Class javaClass, String moduleName) {
+ return factory.createKotlinPackage(javaClass, moduleName);
+ }
+
public static KClass foreignKotlinClass(Class javaClass) {
return factory.foreignKotlinClass(javaClass);
}
diff --git a/core/runtime.jvm/src/kotlin/jvm/internal/ReflectionFactory.java b/core/runtime.jvm/src/kotlin/jvm/internal/ReflectionFactory.java
index e6a2cc273b851..88aae99ba338e 100644
--- a/core/runtime.jvm/src/kotlin/jvm/internal/ReflectionFactory.java
+++ b/core/runtime.jvm/src/kotlin/jvm/internal/ReflectionFactory.java
@@ -27,6 +27,10 @@ public KPackage createKotlinPackage(Class javaClass) {
return null;
}
+ public KPackage createKotlinPackage(Class javaClass, String moduleName) {
+ return null;
+ }
+
public KClass foreignKotlinClass(Class javaClass) {
return new ClassReference(javaClass);
}
|
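The commit above teaches runtime reflection to read a per-module mapping file (META-INF/<moduleName>.kotlin_module) from the classloader. As a minimal sketch of just that resource-loading step, the Java helper below copies a classpath resource into a UTF-8 string; the class and method names are hypothetical and only mirror the buffered copy loop in RuntimePackageMappingProvider.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public final class ResourceText {
    // Reads a classpath resource (e.g. "META-INF/<module>.kotlin_module") fully into a
    // String, returning "" when the resource is absent so callers can fall back to an
    // empty mapping.
    public static String readOrEmpty(ClassLoader loader, String resourceName) throws IOException {
        InputStream in = loader.getResourceAsStream(resourceName);
        if (in == null) {
            return "";
        }
        try {
            ByteArrayOutputStream out = new ByteArrayOutputStream(4096);
            byte[] buffer = new byte[4096];
            int read;
            while ((read = in.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
            return new String(out.toByteArray(), StandardCharsets.UTF_8);
        } finally {
            in.close();
        }
    }
}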
36875309b2c4e6276964e013819ba513e23eea24
|
ReactiveX-RxJava
|
Wraps DoOnEach in a SafeObserver. This commit leverages the SafeObserver facility to get the desired behavior in the face of exceptions. Specifically, if any of the operations performed within the doOnEach handler raises an exception, that exception will propagate through the observable chain.
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/operators/OperationDoOnEach.java b/rxjava-core/src/main/java/rx/operators/OperationDoOnEach.java
index acd841fc2f..1b0aafb578 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationDoOnEach.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationDoOnEach.java
@@ -24,23 +24,24 @@
* Converts the elements of an observable sequence to the specified type.
*/
public class OperationDoOnEach {
- public static <T> OnSubscribeFunc<T> doOnEach(Observable<? extends T> source, Observer<? super T> observer) {
- return new DoOnEachObservable<T>(source, observer);
+ public static <T> OnSubscribeFunc<T> doOnEach(Observable<? extends T> sequence, Observer<? super T> observer) {
+ return new DoOnEachObservable<T>(sequence, observer);
}
private static class DoOnEachObservable<T> implements OnSubscribeFunc<T> {
- private final Observable<? extends T> source;
+ private final Observable<? extends T> sequence;
private final Observer<? super T> doOnEachObserver;
- public DoOnEachObservable(Observable<? extends T> source, Observer<? super T> doOnEachObserver) {
- this.source = source;
+ public DoOnEachObservable(Observable<? extends T> sequence, Observer<? super T> doOnEachObserver) {
+ this.sequence = sequence;
this.doOnEachObserver = doOnEachObserver;
}
@Override
public Subscription onSubscribe(final Observer<? super T> observer) {
- return source.subscribe(new Observer<T>() {
+ final SafeObservableSubscription subscription = new SafeObservableSubscription();
+ return subscription.wrap(sequence.subscribe(new SafeObserver<T>(subscription, new Observer<T>() {
@Override
public void onCompleted() {
doOnEachObserver.onCompleted();
@@ -58,8 +59,7 @@ public void onNext(T value) {
doOnEachObserver.onNext(value);
observer.onNext(value);
}
-
- });
+ })));
}
}
diff --git a/rxjava-core/src/test/java/rx/operators/OperationDoOnEachTest.java b/rxjava-core/src/test/java/rx/operators/OperationDoOnEachTest.java
index 4bf017761c..6c1407ebea 100644
--- a/rxjava-core/src/test/java/rx/operators/OperationDoOnEachTest.java
+++ b/rxjava-core/src/test/java/rx/operators/OperationDoOnEachTest.java
@@ -36,6 +36,7 @@
import rx.concurrency.Schedulers;
import rx.util.functions.Func1;
import rx.util.functions.Func2;
+import rx.util.functions.Action1;
public class OperationDoOnEachTest {
@@ -104,5 +105,25 @@ public String call(String s) {
verify(sideEffectObserver, times(1)).onError(any(Throwable.class));
}
+ @Test
+ public void testDoOnEachWithErrorInCallback() {
+ Observable<String> base = Observable.from("one", "two", "fail", "three");
+ Observable<String> doOnEach = base.doOnEach(new Action1<String>() {
+ @Override
+ public void call(String s) {
+ if ("fail".equals(s)) {
+ throw new RuntimeException("Forced Failure");
+ }
+ }
+ });
+
+ doOnEach.subscribe(subscribedObserver);
+ verify(subscribedObserver, times(1)).onNext("one");
+ verify(subscribedObserver, times(1)).onNext("two");
+ verify(subscribedObserver, never()).onNext("three");
+ verify(subscribedObserver, never()).onCompleted();
+ verify(subscribedObserver, times(1)).onError(any(Throwable.class));
+
+ }
}
|
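The wrapping shown in the diff above routes exceptions thrown inside the doOnEach callback into the error channel instead of letting them escape the subscription. Below is a minimal, standalone sketch of that pattern; the Listener interface is a hypothetical stand-in, not the RxJava Observer or SafeObserver types.

interface Listener<T> {
    void onNext(T value);
    void onError(Throwable error);
    void onCompleted();
}

final class SafeListener<T> implements Listener<T> {
    private final Listener<T> downstream;
    private boolean terminated = false;

    SafeListener(Listener<T> downstream) {
        this.downstream = downstream;
    }

    @Override
    public void onNext(T value) {
        if (terminated) return;
        try {
            downstream.onNext(value);
        } catch (RuntimeException e) {
            onError(e); // route callback failures into the error channel
        }
    }

    @Override
    public void onError(Throwable error) {
        if (terminated) return;
        terminated = true;
        downstream.onError(error);
    }

    @Override
    public void onCompleted() {
        if (terminated) return;
        terminated = true;
        downstream.onCompleted();
    }
}

Once terminated by onError or onCompleted, the wrapper drops further notifications, which mirrors the at-most-one-terminal-event contract the commit relies on.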
3181d96ec864a467d4259e31c64f2b7554afc3d4
|
hbase
|
HBASE-2397 Bytes.toStringBinary escapes printable chars. git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@951840 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 280daa7fb5ce..8ad8e5601edf 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -23,6 +23,7 @@ Release 0.21.0 - Unreleased
HBASE-2541 Remove transactional contrib (Clint Morgan via Stack)
HBASE-2542 Fold stargate contrib into core
HBASE-2565 Remove contrib module from hbase
+ HBASE-2397 Bytes.toStringBinary escapes printable chars
BUG FIXES
HBASE-1791 Timeout in IndexRecordWriter (Bradford Stephens via Andrew
diff --git a/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index bed859f48e62..1b46f2d892a4 100644
--- a/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -320,16 +320,7 @@ public static String toStringBinary(final byte [] b, int off, int len) {
if ( (ch >= '0' && ch <= '9')
|| (ch >= 'A' && ch <= 'Z')
|| (ch >= 'a' && ch <= 'z')
- || ch == ','
- || ch == '_'
- || ch == '-'
- || ch == ':'
- || ch == ' '
- || ch == '<'
- || ch == '>'
- || ch == '='
- || ch == '/'
- || ch == '.') {
+ || " `~!@#$%^&*()-_=+[]{}\\|;:'\",.<>/?".indexOf(ch) >= 0 ) {
result.append(first.charAt(i));
} else {
result.append(String.format("\\x%02X", ch));
|
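The change above replaces a hand-maintained list of allowed punctuation with a single membership check against a string of printable characters. A small standalone sketch of that escaping approach (not the actual HBase Bytes class) follows.

public final class BinaryText {
    private static final String PRINTABLE_PUNCT = " `~!@#$%^&*()-_=+[]{}\\|;:'\",.<>/?";

    // Renders bytes as text, keeping alphanumerics plus the punctuation whitelist and
    // hex-escaping everything else.
    public static String toStringBinary(byte[] bytes) {
        StringBuilder result = new StringBuilder();
        for (byte b : bytes) {
            int ch = b & 0xFF;
            boolean printable = (ch >= '0' && ch <= '9')
                    || (ch >= 'A' && ch <= 'Z')
                    || (ch >= 'a' && ch <= 'z')
                    || PRINTABLE_PUNCT.indexOf(ch) >= 0;
            if (printable) {
                result.append((char) ch);
            } else {
                result.append(String.format("\\x%02X", ch));
            }
        }
        return result.toString();
    }
}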
15fe100f951a1bba5817676c8d1f9a0fe03c1f65
|
restlet-framework-java
|
- Reference now verifies the validity of characters before setting them. Suggested by Stephan Koops.
|
a
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet/src/org/restlet/data/Reference.java b/modules/org.restlet/src/org/restlet/data/Reference.java
index e845037352..605e71129d 100644
--- a/modules/org.restlet/src/org/restlet/data/Reference.java
+++ b/modules/org.restlet/src/org/restlet/data/Reference.java
@@ -522,13 +522,14 @@ public Reference(Reference baseRef, Reference uriReference) {
*
* @param baseRef
* The base reference.
- * @param uriReference
+ * @param uriRef
* The URI reference, either absolute or relative.
*/
- public Reference(Reference baseRef, String uriReference) {
+ public Reference(Reference baseRef, String uriRef) {
+ checkValidity(uriRef);
this.baseRef = baseRef;
- this.internalRef = uriReference;
- internalUpdate();
+ this.internalRef = uriRef;
+ updateIndexes();
}
/**
@@ -649,6 +650,25 @@ public Reference addSegment(String value) {
return this;
}
+ /**
+ * Checks if all characters are valid.
+ *
+ * @param uriRef
+ * The URI reference to check.
+ */
+ private void checkValidity(String uriRef) throws IllegalArgumentException {
+ if (uriRef != null) {
+ // Ensure that all characters are valid
+ for (int i = 0; i < uriRef.length(); i++) {
+ if (!isValid(uriRef.charAt(i))) {
+ throw new IllegalArgumentException(
+ "Invalid character detected in URI reference at index '"
+ + i + "': \"" + uriRef.charAt(i) + "\"");
+ }
+ }
+ }
+ }
+
@Override
public Reference clone() {
Reference newRef = new Reference();
@@ -1749,6 +1769,8 @@ public void setBaseRef(String baseUri) {
* ('#').
*/
public void setFragment(String fragment) {
+ checkValidity(fragment);
+
if ((fragment != null) && (fragment.indexOf('#') != -1)) {
throw new IllegalArgumentException(
"Illegal '#' character detected in parameter");
@@ -1777,7 +1799,7 @@ public void setFragment(String fragment) {
}
}
- internalUpdate();
+ updateIndexes();
}
/**
@@ -1864,8 +1886,12 @@ public void setHostPort(Integer port) {
* delimiter ('#').
*/
public void setIdentifier(String identifier) {
- if (identifier == null)
+ checkValidity(identifier);
+
+ if (identifier == null) {
identifier = "";
+ }
+
if (identifier.indexOf('#') != -1) {
throw new IllegalArgumentException(
"Illegal '#' character detected in parameter");
@@ -1879,7 +1905,7 @@ public void setIdentifier(String identifier) {
this.internalRef = identifier;
}
- internalUpdate();
+ updateIndexes();
}
}
@@ -1968,6 +1994,8 @@ public void setProtocol(Protocol protocol) {
* The query component for hierarchical identifiers.
*/
public void setQuery(String query) {
+ checkValidity(query);
+
if (queryIndex != -1) {
// Query found
if (fragmentIndex != -1) {
@@ -2019,7 +2047,7 @@ public void setQuery(String query) {
}
}
- internalUpdate();
+ updateIndexes();
}
/**
@@ -2029,8 +2057,12 @@ public void setQuery(String query) {
* The relative part to set.
*/
public void setRelativePart(String relativePart) {
- if (relativePart == null)
+ checkValidity(relativePart);
+
+ if (relativePart == null) {
relativePart = "";
+ }
+
if (schemeIndex == -1) {
// This is a relative reference, no scheme found
if (queryIndex != -1) {
@@ -2047,7 +2079,7 @@ public void setRelativePart(String relativePart) {
}
}
- internalUpdate();
+ updateIndexes();
}
/**
@@ -2057,6 +2089,8 @@ public void setRelativePart(String relativePart) {
* The scheme component.
*/
public void setScheme(String scheme) {
+ checkValidity(scheme);
+
if (scheme != null) {
// URI specification indicates that scheme names should be
// produced in lower case
@@ -2082,7 +2116,7 @@ public void setScheme(String scheme) {
}
}
- internalUpdate();
+ updateIndexes();
}
/**
@@ -2092,8 +2126,12 @@ public void setScheme(String scheme) {
* The scheme specific part.
*/
public void setSchemeSpecificPart(String schemeSpecificPart) {
- if (schemeSpecificPart == null)
+ checkValidity(schemeSpecificPart);
+
+ if (schemeSpecificPart == null) {
schemeSpecificPart = "";
+ }
+
if (schemeIndex != -1) {
// Scheme found
if (fragmentIndex != -1) {
@@ -2120,7 +2158,7 @@ public void setSchemeSpecificPart(String schemeSpecificPart) {
}
}
- internalUpdate();
+ updateIndexes();
}
/**
@@ -2222,20 +2260,10 @@ public String toString(boolean query, boolean fragment) {
}
/**
- * Update internal indexes and check if all characters are valid.
+ * Updates internal indexes.
*/
- private void internalUpdate() {
+ private void updateIndexes() {
if (internalRef != null) {
- // Ensure that all characters are valid
- for (int i = 0; i < internalRef.length(); i++) {
- if (!isValid(internalRef.charAt(i))) {
- throw new IllegalArgumentException(
- "Invalid character detected in URI reference at index '"
- + i + "': \"" + internalRef.charAt(i)
- + "\"");
- }
- }
-
// Compute the indexes
int firstSlashIndex = this.internalRef.indexOf('/');
this.schemeIndex = this.internalRef.indexOf(':');
|
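The refactoring above moves character validation out of the index-update routine and into a checkValidity step that runs before each setter mutates state. The sketch below shows the shape of that up-front check; the isValidUriChar predicate is a placeholder assumption (visible ASCII only) and not Restlet's actual Reference.isValid() rule.

public final class UriCheck {
    // Rejects the whole string before any internal state is touched, so a failed
    // setter leaves the reference unchanged.
    public static void checkValidity(String uriRef) {
        if (uriRef == null) return;
        for (int i = 0; i < uriRef.length(); i++) {
            char c = uriRef.charAt(i);
            if (!isValidUriChar(c)) {
                throw new IllegalArgumentException(
                        "Invalid character detected in URI reference at index '"
                                + i + "': \"" + c + "\"");
            }
        }
    }

    private static boolean isValidUriChar(char c) {
        return c > 0x20 && c < 0x7F; // placeholder rule: visible ASCII, no spaces or controls
    }
}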
2d7b9660ef2825c25f2f3fbeda9acb96c3712575
|
ReactiveX-RxJava
|
Adding missing javadocs to TestSubject (-1322)
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/subjects/TestSubject.java b/rxjava-core/src/main/java/rx/subjects/TestSubject.java
index 92a8f8df71..2145be6f7b 100644
--- a/rxjava-core/src/main/java/rx/subjects/TestSubject.java
+++ b/rxjava-core/src/main/java/rx/subjects/TestSubject.java
@@ -26,35 +26,20 @@
import rx.subjects.SubjectSubscriptionManager.SubjectObserver;
/**
- * Subject that, once an {@link Observer} has subscribed, publishes all subsequent events to the subscriber.
- * <p>
- * <img width="640" src="https://raw.github.com/wiki/Netflix/RxJava/images/rx-operators/S.PublishSubject.png">
- * <p>
- * Example usage:
- * <p>
- * <pre> {@code
-
- PublishSubject<Object> subject = PublishSubject.create();
- // observer1 will receive all onNext and onCompleted events
- subject.subscribe(observer1);
- subject.onNext("one");
- subject.onNext("two");
- // observer2 will only receive "three" and onCompleted
- subject.subscribe(observer2);
- subject.onNext("three");
- subject.onCompleted();
-
- } </pre>
- *
+ * A variety of Subject that is useful for testing purposes. It operates on a {@link TestScheduler} and allows
+ * you to precisely time emissions and notifications to the Subject's subscribers.
+ *
* @param <T>
* the type of item observed by and emitted by the subject
- * @warn javadoc seems misleading
*/
public final class TestSubject<T> extends Subject<T, T> {
/**
- * @warn javadoc missing
- * @return
+ * Creates and returns a new {@code TestSubject}.
+ *
+ * @param <T> the value type
+ * @param scheduler a {@link TestScheduler} on which to operate this Subject
+ * @return the new {@code TestSubject}
*/
public static <T> TestSubject<T> create(TestScheduler scheduler) {
final SubjectSubscriptionManager<T> state = new SubjectSubscriptionManager<T>();
@@ -95,8 +80,11 @@ private void _onCompleted() {
}
/**
- * @warn javadoc missing
+ * Schedule a call to the {@code onCompleted} methods of all of the subscribers to this Subject to begin at
+ * a particular time.
+ *
* @param timeInMilliseconds
+ * the time at which to begin calling the {@code onCompleted} methods of the subscribers
*/
public void onCompleted(long timeInMilliseconds) {
innerScheduler.schedule(new Action0() {
@@ -123,9 +111,13 @@ private void _onError(final Throwable e) {
}
/**
- * @warn javadoc missing
+ * Schedule a call to the {@code onError} methods of all of the subscribers to this Subject to begin at
+ * a particular time.
+ *
* @param e
+ * the {@code Throwable} to pass to the {@code onError} methods of the subscribers
* @param timeInMilliseconds
+ * the time at which to begin calling the {@code onError} methods of the subscribers
*/
public void onError(final Throwable e, long timeInMilliseconds) {
innerScheduler.schedule(new Action0() {
@@ -150,9 +142,13 @@ private void _onNext(T v) {
}
/**
- * @warn javadoc missing
+ * Emit an item to all of the subscribers to this Subject at a particular time.
+ *
* @param v
+ * the item to emit
* @param timeInMilliseconds
+ * the time at which to begin calling the {@code onNext} methods of the subscribers in order to emit
+ * the item
*/
public void onNext(final T v, long timeInMilliseconds) {
innerScheduler.schedule(new Action0() {
|
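The javadoc added above describes a Subject that schedules its onNext/onError/onCompleted calls at chosen virtual times on a TestScheduler. As a standalone illustration of that idea only (not the RxJava API), the sketch below queues timestamped values and delivers them when a virtual clock is advanced.

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

final class VirtualTimeQueue<T> {
    private static final class Timed<T> {
        final long timeMillis;
        final T value;
        Timed(long timeMillis, T value) { this.timeMillis = timeMillis; this.value = value; }
    }

    private final List<Timed<T>> pending = new ArrayList<>();
    private long nowMillis = 0;

    // Record a value to be delivered once the virtual clock reaches timeMillis.
    void emitAt(long timeMillis, T value) {
        pending.add(new Timed<>(timeMillis, value));
    }

    // Advance the virtual clock and hand every due value to the consumer, in insertion order.
    void advanceTo(long timeMillis, Consumer<T> consumer) {
        nowMillis = timeMillis;
        pending.removeIf(t -> {
            if (t.timeMillis <= nowMillis) {
                consumer.accept(t.value);
                return true;
            }
            return false;
        });
    }
}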
35889c08e997c54194471256e0760ee29a54bafe
|
kotlin
|
Test data paths fixed
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/j2k/test/org/jetbrains/jet/j2k/StandaloneJavaToKotlinConverterTest.java b/j2k/test/org/jetbrains/jet/j2k/StandaloneJavaToKotlinConverterTest.java
index a168d150a4654..d153c24160160 100644
--- a/j2k/test/org/jetbrains/jet/j2k/StandaloneJavaToKotlinConverterTest.java
+++ b/j2k/test/org/jetbrains/jet/j2k/StandaloneJavaToKotlinConverterTest.java
@@ -14,8 +14,6 @@
import java.io.File;
import java.io.IOException;
-import static org.jetbrains.jet.j2k.TestCaseBuilder.getTestDataPathBase;
-
/**
* @author ignatov
*/
@@ -33,7 +31,7 @@ public StandaloneJavaToKotlinConverterTest(String dataPath, String name) {
@Override
protected void runTest() throws Throwable {
- String javaPath = "testData" + File.separator + getTestFilePath();
+ String javaPath = "j2k/testData" + File.separator + getTestFilePath();
String kotlinPath = javaPath.replace(".jav", ".kt");
final File kotlinFile = new File(kotlinPath);
@@ -89,7 +87,7 @@ public String getName() {
@NotNull
public static Test suite() {
TestSuite suite = new TestSuite();
- suite.addTest(TestCaseBuilder.suiteForDirectory(getTestDataPathBase(), "/ast", new TestCaseBuilder.NamedTestFactory() {
+ suite.addTest(TestCaseBuilder.suiteForDirectory("j2k/testData", "/ast", new TestCaseBuilder.NamedTestFactory() {
@NotNull
@Override
public Test createTest(@NotNull String dataPath, @NotNull String name) {
|
617235b95357c5cfaf06710401d98eaee9572e6a
|
hbase
|
HBASE-10686 [WINDOWS] TestStripeStoreFileManager fails on windows. git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1575011 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java
index 0fdf5d8201ab..664653ff1760 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java
@@ -18,6 +18,13 @@
*/
package org.apache.hadoop.hbase.regionserver;
+import static org.apache.hadoop.hbase.regionserver.StripeStoreFileManager.OPEN_KEY;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -26,9 +33,6 @@
import java.util.Iterator;
import java.util.List;
-import static org.junit.Assert.*;
-import static org.apache.hadoop.hbase.regionserver.StripeStoreFileManager.OPEN_KEY;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -551,7 +555,7 @@ private static MockStoreFile createFile(
long size, long seqNum, byte[] startKey, byte[] endKey) throws Exception {
FileSystem fs = TEST_UTIL.getTestFileSystem();
Path testFilePath = StoreFile.getUniqueFile(fs, CFDIR);
- fs.create(testFilePath);
+ fs.create(testFilePath).close();
MockStoreFile sf = new MockStoreFile(TEST_UTIL, testFilePath, size, 0, false, seqNum);
if (startKey != null) {
sf.setMetadataValue(StripeStoreFileManager.STRIPE_START_KEY, startKey);
|
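The one-line fix above closes the stream returned by fs.create(...) so the test file's handle is released immediately, which matters on Windows where an open handle blocks later operations on the same file. A general sketch of the same hygiene with plain Java I/O (not the Hadoop FileSystem API) is below.

import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;

public final class CreateAndClose {
    // Creates an empty file and releases its handle right away; try-with-resources
    // guarantees the close even if an exception is thrown in the body.
    public static void touch(Path path) throws IOException {
        try (OutputStream out = Files.newOutputStream(path)) {
            // nothing to write; we only need the file to exist
        }
    }
}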
2b97a4cce4dc1bd0528e960f364edfa132d6bfa3
|
hadoop
|
svn merge -c 1371390 FIXES: YARN-14. Symlinks to peer distributed cache files no longer work (Jason Lowe via bobby). git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1371395 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 1ae8a9dd1bf08..9cfad504cc408 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -36,3 +36,16 @@ Release 2.1.0-alpha - Unreleased
YARN-12. Fix findbugs warnings in FairScheduler. (Junping Du via acmurthy)
+Release 0.23.3 - Unreleased
+
+ INCOMPATIBLE CHANGES
+
+ NEW FEATURES
+
+ IMPROVEMENTS
+
+ BUG FIXES
+
+ YARN-14. Symlinks to peer distributed cache files no longer work
+ (Jason Lowe via bobby)
+
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/Container.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/Container.java
index e5ba3f2993609..af0f92ee6fc10 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/Container.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/Container.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.nodemanager.containermanager.container;
+import java.util.List;
import java.util.Map;
import org.apache.hadoop.fs.Path;
@@ -38,7 +39,7 @@ public interface Container extends EventHandler<ContainerEvent> {
Credentials getCredentials();
- Map<Path,String> getLocalizedResources();
+ Map<Path,List<String>> getLocalizedResources();
ContainerStatus cloneAndGetContainerStatus();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
index 1cbdbaa8146bc..c9802080854f9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
@@ -84,10 +84,10 @@ public class ContainerImpl implements Container {
private static final Log LOG = LogFactory.getLog(Container.class);
private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
- private final Map<LocalResourceRequest,String> pendingResources =
- new HashMap<LocalResourceRequest,String>();
- private final Map<Path,String> localizedResources =
- new HashMap<Path,String>();
+ private final Map<LocalResourceRequest,List<String>> pendingResources =
+ new HashMap<LocalResourceRequest,List<String>>();
+ private final Map<Path,List<String>> localizedResources =
+ new HashMap<Path,List<String>>();
private final List<LocalResourceRequest> publicRsrcs =
new ArrayList<LocalResourceRequest>();
private final List<LocalResourceRequest> privateRsrcs =
@@ -327,7 +327,7 @@ public String getUser() {
}
@Override
- public Map<Path,String> getLocalizedResources() {
+ public Map<Path,List<String>> getLocalizedResources() {
this.readLock.lock();
try {
assert ContainerState.LOCALIZED == getContainerState(); // TODO: FIXME!!
@@ -496,20 +496,25 @@ public ContainerState transition(ContainerImpl container,
try {
for (Map.Entry<String,LocalResource> rsrc : cntrRsrc.entrySet()) {
try {
- LocalResourceRequest req =
- new LocalResourceRequest(rsrc.getValue());
- container.pendingResources.put(req, rsrc.getKey());
- switch (rsrc.getValue().getVisibility()) {
- case PUBLIC:
- container.publicRsrcs.add(req);
- break;
- case PRIVATE:
- container.privateRsrcs.add(req);
- break;
- case APPLICATION:
- container.appRsrcs.add(req);
- break;
- }
+ LocalResourceRequest req =
+ new LocalResourceRequest(rsrc.getValue());
+ List<String> links = container.pendingResources.get(req);
+ if (links == null) {
+ links = new ArrayList<String>();
+ container.pendingResources.put(req, links);
+ }
+ links.add(rsrc.getKey());
+ switch (rsrc.getValue().getVisibility()) {
+ case PUBLIC:
+ container.publicRsrcs.add(req);
+ break;
+ case PRIVATE:
+ container.privateRsrcs.add(req);
+ break;
+ case APPLICATION:
+ container.appRsrcs.add(req);
+ break;
+ }
} catch (URISyntaxException e) {
LOG.info("Got exception parsing " + rsrc.getKey()
+ " and value " + rsrc.getValue());
@@ -560,15 +565,16 @@ static class LocalizedTransition implements
public ContainerState transition(ContainerImpl container,
ContainerEvent event) {
ContainerResourceLocalizedEvent rsrcEvent = (ContainerResourceLocalizedEvent) event;
- String sym = container.pendingResources.remove(rsrcEvent.getResource());
- if (null == sym) {
+ List<String> syms =
+ container.pendingResources.remove(rsrcEvent.getResource());
+ if (null == syms) {
LOG.warn("Localized unknown resource " + rsrcEvent.getResource() +
" for container " + container.getContainerID());
assert false;
// fail container?
return ContainerState.LOCALIZING;
}
- container.localizedResources.put(rsrcEvent.getLocation(), sym);
+ container.localizedResources.put(rsrcEvent.getLocation(), syms);
if (!container.pendingResources.isEmpty()) {
return ContainerState.LOCALIZING;
}
@@ -728,15 +734,16 @@ static class LocalizedResourceDuringKillTransition implements
@Override
public void transition(ContainerImpl container, ContainerEvent event) {
ContainerResourceLocalizedEvent rsrcEvent = (ContainerResourceLocalizedEvent) event;
- String sym = container.pendingResources.remove(rsrcEvent.getResource());
- if (null == sym) {
+ List<String> syms =
+ container.pendingResources.remove(rsrcEvent.getResource());
+ if (null == syms) {
LOG.warn("Localized unknown resource " + rsrcEvent.getResource() +
" for container " + container.getContainerID());
assert false;
// fail container?
return;
}
- container.localizedResources.put(rsrcEvent.getLocation(), sym);
+ container.localizedResources.put(rsrcEvent.getLocation(), syms);
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java
index 821d4a042b3cc..b06788341fe5c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java
@@ -111,7 +111,8 @@ public ContainerLaunch(Configuration configuration, Dispatcher dispatcher,
@SuppressWarnings("unchecked") // dispatcher not typed
public Integer call() {
final ContainerLaunchContext launchContext = container.getLaunchContext();
- final Map<Path,String> localResources = container.getLocalizedResources();
+ final Map<Path,List<String>> localResources =
+ container.getLocalizedResources();
ContainerId containerID = container.getContainerID();
String containerIdStr = ConverterUtils.toString(containerID);
final String user = launchContext.getUser();
@@ -533,7 +534,7 @@ public void sanitizeEnv(Map<String, String> environment,
}
static void writeLaunchEnv(OutputStream out,
- Map<String,String> environment, Map<Path,String> resources,
+ Map<String,String> environment, Map<Path,List<String>> resources,
List<String> command)
throws IOException {
ShellScriptBuilder sb = new ShellScriptBuilder();
@@ -543,8 +544,10 @@ static void writeLaunchEnv(OutputStream out,
}
}
if (resources != null) {
- for (Map.Entry<Path,String> link : resources.entrySet()) {
- sb.symlink(link.getKey(), link.getValue());
+ for (Map.Entry<Path,List<String>> entry : resources.entrySet()) {
+ for (String linkName : entry.getValue()) {
+ sb.symlink(entry.getKey(), linkName);
+ }
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestContainer.java
index bc6ec196e17ea..cb7c19dc2faee 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestContainer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/TestContainer.java
@@ -29,12 +29,15 @@
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.util.AbstractMap.SimpleEntry;
+import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
+import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
@@ -111,11 +114,12 @@ public void testLocalizationLaunch() throws Exception {
wc = new WrappedContainer(8, 314159265358979L, 4344, "yak");
assertEquals(ContainerState.NEW, wc.c.getContainerState());
wc.initContainer();
- Map<Path, String> localPaths = wc.localizeResources();
+ Map<Path, List<String>> localPaths = wc.localizeResources();
// all resources should be localized
assertEquals(ContainerState.LOCALIZED, wc.c.getContainerState());
- for (Entry<Path,String> loc : wc.c.getLocalizedResources().entrySet()) {
+ for (Entry<Path, List<String>> loc : wc.c.getLocalizedResources()
+ .entrySet()) {
assertEquals(localPaths.remove(loc.getKey()), loc.getValue());
}
assertTrue(localPaths.isEmpty());
@@ -578,10 +582,12 @@ public void initContainer() {
// Localize resources
// Skip some resources so as to consider them failed
- public Map<Path, String> doLocalizeResources(boolean checkLocalizingState,
- int skipRsrcCount) throws URISyntaxException {
+ public Map<Path, List<String>> doLocalizeResources(
+ boolean checkLocalizingState, int skipRsrcCount)
+ throws URISyntaxException {
Path cache = new Path("file:///cache");
- Map<Path, String> localPaths = new HashMap<Path, String>();
+ Map<Path, List<String>> localPaths =
+ new HashMap<Path, List<String>>();
int counter = 0;
for (Entry<String, LocalResource> rsrc : localResources.entrySet()) {
if (counter++ < skipRsrcCount) {
@@ -592,7 +598,7 @@ public Map<Path, String> doLocalizeResources(boolean checkLocalizingState,
}
LocalResourceRequest req = new LocalResourceRequest(rsrc.getValue());
Path p = new Path(cache, rsrc.getKey());
- localPaths.put(p, rsrc.getKey());
+ localPaths.put(p, Arrays.asList(rsrc.getKey()));
// rsrc copied to p
c.handle(new ContainerResourceLocalizedEvent(c.getContainerID(),
req, p));
@@ -602,7 +608,8 @@ public Map<Path, String> doLocalizeResources(boolean checkLocalizingState,
}
- public Map<Path, String> localizeResources() throws URISyntaxException {
+ public Map<Path, List<String>> localizeResources()
+ throws URISyntaxException {
return doLocalizeResources(true, 0);
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
index bdd77f8a20b4b..822835dc3d08b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
@@ -28,6 +28,7 @@
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@@ -95,9 +96,10 @@ public void testSpecialCharSymlinks() throws IOException {
writer.println(timeoutCommand);
writer.close();
- Map<Path, String> resources = new HashMap<Path, String>();
+ Map<Path, List<String>> resources =
+ new HashMap<Path, List<String>>();
Path path = new Path(shellFile.getAbsolutePath());
- resources.put(path, badSymlink);
+ resources.put(path, Arrays.asList(badSymlink));
FileOutputStream fos = new FileOutputStream(tempFile);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockContainer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockContainer.java
index 1b2e0653d06ff..519ff1834840d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockContainer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockContainer.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.yarn.server.nodemanager.webapp;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
@@ -43,7 +44,8 @@ public class MockContainer implements Container {
private ContainerState state;
private String user;
private ContainerLaunchContext launchContext;
- private final Map<Path, String> resource = new HashMap<Path, String>();
+ private final Map<Path, List<String>> resource =
+ new HashMap<Path, List<String>>();
private RecordFactory recordFactory;
public MockContainer(ApplicationAttemptId appAttemptId,
@@ -92,7 +94,7 @@ public Credentials getCredentials() {
}
@Override
- public Map<Path, String> getLocalizedResources() {
+ public Map<Path, List<String>> getLocalizedResources() {
return resource;
}
|
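The core of the fix above is a data-structure change: Map<Path,String> becomes Map<Path,List<String>> so that several symlink names can reference the same localized file instead of the last one silently overwriting the rest. A minimal standalone sketch of that accumulation pattern (hypothetical class, not the NodeManager code) is below.

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class LinkTable<K> {
    // Each key keeps every link name registered for it, in insertion order.
    private final Map<K, List<String>> links = new HashMap<>();

    public void addLink(K key, String linkName) {
        List<String> names = links.get(key);
        if (names == null) {
            names = new ArrayList<>();
            links.put(key, names);
        }
        names.add(linkName);
    }

    public List<String> linksFor(K key) {
        List<String> names = links.get(key);
        return names == null ? Collections.<String>emptyList() : names;
    }
}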
8a07f772e2d36b759f4d5ab6f213fbb0869a1766
|
orientdb
|
Fixed issue with inner class and Object Database interface
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/object/src/main/java/com/orientechnologies/orient/object/enhancement/OObjectEntityEnhancer.java b/object/src/main/java/com/orientechnologies/orient/object/enhancement/OObjectEntityEnhancer.java
index f4e5e89df7d..db4864a47d8 100644
--- a/object/src/main/java/com/orientechnologies/orient/object/enhancement/OObjectEntityEnhancer.java
+++ b/object/src/main/java/com/orientechnologies/orient/object/enhancement/OObjectEntityEnhancer.java
@@ -49,281 +49,286 @@
*/
public class OObjectEntityEnhancer {
- private static final OObjectEntityEnhancer instance = new OObjectEntityEnhancer();
+ private static final OObjectEntityEnhancer instance = new OObjectEntityEnhancer();
- public static final String ENHANCER_CLASS_PREFIX = "orientdb_";
+ public static final String ENHANCER_CLASS_PREFIX = "orientdb_";
- public OObjectEntityEnhancer() {
- }
+ public OObjectEntityEnhancer() {
+ }
- @SuppressWarnings("unchecked")
- public <T> T getProxiedInstance(final String iClass, final OEntityManager entityManager, final ODocument doc, Object... iArgs) {
- final Class<T> clazz = (Class<T>) entityManager.getEntityClass(iClass);
- return getProxiedInstance(clazz, doc, iArgs);
- }
+ @SuppressWarnings("unchecked")
+ public <T> T getProxiedInstance(final String iClass, final OEntityManager entityManager, final ODocument doc, Object... iArgs) {
+ final Class<T> clazz = (Class<T>) entityManager.getEntityClass(iClass);
+ return getProxiedInstance(clazz, doc, iArgs);
+ }
- @SuppressWarnings("unchecked")
- public <T> T getProxiedInstance(final String iClass, final Object iEnclosingInstance, final OEntityManager entityManager,
- final ODocument doc, Object... iArgs) {
- final Class<T> clazz = (Class<T>) entityManager.getEntityClass(iClass);
- return getProxiedInstance(clazz, iEnclosingInstance, doc, iArgs);
- }
+ @SuppressWarnings("unchecked")
+ public <T> T getProxiedInstance(final String iClass, final Object iEnclosingInstance, final OEntityManager entityManager,
+ final ODocument doc, Object... iArgs) {
+ final Class<T> clazz = (Class<T>) entityManager.getEntityClass(iClass);
+ return getProxiedInstance(clazz, iEnclosingInstance, doc, iArgs);
+ }
- public <T> T getProxiedInstance(final Class<T> iClass, final ODocument doc, Object... iArgs) {
- return getProxiedInstance(iClass, null, doc, iArgs);
- }
+ public <T> T getProxiedInstance(final Class<T> iClass, final ODocument doc, Object... iArgs) {
+ return getProxiedInstance(iClass, null, doc, iArgs);
+ }
- @SuppressWarnings("unchecked")
- public <T> T getProxiedInstance(final Class<T> iClass, Object iEnclosingInstance, final ODocument doc, Object... iArgs) {
- if (iClass == null) {
- throw new OSerializationException("Type " + doc.getClassName()
- + " cannot be serialized because is not part of registered entities. To fix this error register this class");
- }
- final Class<T> c;
- boolean isInnerClass = iClass.getEnclosingClass() != null;
- if (Proxy.class.isAssignableFrom(iClass)) {
- c = iClass;
- } else {
- ProxyFactory f = new ProxyFactory();
- f.setSuperclass(iClass);
- f.setFilter(new MethodFilter() {
- public boolean isHandled(Method m) {
- final String methodName = m.getName();
- try {
- return (isSetterMethod(methodName, m) || isGetterMethod(methodName, m) || methodName.equals("equals") || methodName
- .equals("hashCode"));
- } catch (NoSuchFieldException nsfe) {
- OLogManager.instance().warn(this, "Error handling the method %s in class %s", nsfe, m.getName(), iClass.getName());
- return false;
- } catch (SecurityException se) {
- OLogManager.instance().warn(this, "", se, m.getName(), iClass.getName());
- return false;
- }
- }
- });
- c = f.createClass();
- }
- MethodHandler mi = new OObjectProxyMethodHandler(doc);
- try {
- T newEntity;
- if (iArgs != null && iArgs.length > 0) {
- if (isInnerClass) {
- if (iEnclosingInstance == null) {
- iEnclosingInstance = iClass.getEnclosingClass().newInstance();
- }
- Object[] newArgs = new Object[iArgs.length + 1];
- newArgs[0] = iEnclosingInstance;
- for (int i = 0; i < iArgs.length; i++) {
- newArgs[i + 1] = iArgs[i];
- }
- iArgs = newArgs;
- }
- Constructor<T> constructor = null;
- for (Constructor<?> constr : c.getConstructors()) {
- boolean found = true;
- if (constr.getParameterTypes().length == iArgs.length) {
- for (int i = 0; i < constr.getParameterTypes().length; i++) {
- Class<?> parameterType = constr.getParameterTypes()[i];
- if (parameterType.isPrimitive()) {
- if (!isPrimitiveParameterCorrect(parameterType, iArgs[i])) {
- found = false;
- break;
- }
- } else if (iArgs[i] != null && !parameterType.isAssignableFrom(iArgs[i].getClass())) {
- found = false;
- break;
- }
- }
- } else {
- continue;
- }
- if (found) {
- constructor = (Constructor<T>) constr;
- break;
- }
- }
- if (constructor != null) {
- newEntity = (T) constructor.newInstance(iArgs);
- initDocument(iClass, newEntity, doc, (ODatabaseObject) ODatabaseRecordThreadLocal.INSTANCE.get().getDatabaseOwner());
- } else {
- if (iEnclosingInstance != null)
- newEntity = createInstanceNoParameters(c, iEnclosingInstance);
- else
- newEntity = createInstanceNoParameters(c, iClass);
- }
- } else {
- if (iEnclosingInstance != null)
- newEntity = createInstanceNoParameters(c, iEnclosingInstance);
- else
- newEntity = createInstanceNoParameters(c, iClass);
- }
- ((Proxy) newEntity).setHandler(mi);
- if (OObjectEntitySerializer.hasBoundedDocumentField(iClass))
- OObjectEntitySerializer.setFieldValue(OObjectEntitySerializer.getBoundedDocumentField(iClass), newEntity, doc);
- return newEntity;
- } catch (InstantiationException ie) {
- OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), ie);
- } catch (IllegalAccessException iae) {
- OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), iae);
- } catch (IllegalArgumentException iae) {
- OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), iae);
- } catch (SecurityException se) {
- OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), se);
- } catch (InvocationTargetException ite) {
- OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), ite);
- } catch (NoSuchMethodException nsme) {
- OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), nsme);
- }
- return null;
- }
+ @SuppressWarnings("unchecked")
+ public <T> T getProxiedInstance(final Class<T> iClass, Object iEnclosingInstance, final ODocument doc, Object... iArgs) {
+ if (iClass == null) {
+ throw new OSerializationException("Type " + doc.getClassName()
+ + " cannot be serialized because is not part of registered entities. To fix this error register this class");
+ }
+ final Class<T> c;
+ boolean isInnerClass = iClass.getEnclosingClass() != null;
+ if (Proxy.class.isAssignableFrom(iClass)) {
+ c = iClass;
+ } else {
+ ProxyFactory f = new ProxyFactory();
+ f.setSuperclass(iClass);
+ f.setFilter(new MethodFilter() {
+ public boolean isHandled(Method m) {
+ final String methodName = m.getName();
+ try {
+ return (isSetterMethod(methodName, m) || isGetterMethod(methodName, m) || methodName.equals("equals") || methodName
+ .equals("hashCode"));
+ } catch (NoSuchFieldException nsfe) {
+ OLogManager.instance().warn(this, "Error handling the method %s in class %s", nsfe, m.getName(), iClass.getName());
+ return false;
+ } catch (SecurityException se) {
+ OLogManager.instance().warn(this, "", se, m.getName(), iClass.getName());
+ return false;
+ }
+ }
+ });
+ c = f.createClass();
+ }
+ MethodHandler mi = new OObjectProxyMethodHandler(doc);
+ try {
+ T newEntity;
+ if (iArgs != null && iArgs.length > 0) {
+ if (isInnerClass) {
+ if (iEnclosingInstance == null) {
+ iEnclosingInstance = iClass.getEnclosingClass().newInstance();
+ }
+ Object[] newArgs = new Object[iArgs.length + 1];
+ newArgs[0] = iEnclosingInstance;
+ for (int i = 0; i < iArgs.length; i++) {
+ newArgs[i + 1] = iArgs[i];
+ }
+ iArgs = newArgs;
+ }
+ Constructor<T> constructor = null;
+ for (Constructor<?> constr : c.getConstructors()) {
+ boolean found = true;
+ if (constr.getParameterTypes().length == iArgs.length) {
+ for (int i = 0; i < constr.getParameterTypes().length; i++) {
+ Class<?> parameterType = constr.getParameterTypes()[i];
+ if (parameterType.isPrimitive()) {
+ if (!isPrimitiveParameterCorrect(parameterType, iArgs[i])) {
+ found = false;
+ break;
+ }
+ } else if (iArgs[i] != null && !parameterType.isAssignableFrom(iArgs[i].getClass())) {
+ found = false;
+ break;
+ }
+ }
+ } else {
+ continue;
+ }
+ if (found) {
+ constructor = (Constructor<T>) constr;
+ break;
+ }
+ }
+ if (constructor != null) {
+ newEntity = (T) constructor.newInstance(iArgs);
+ initDocument(iClass, newEntity, doc, (ODatabaseObject) ODatabaseRecordThreadLocal.INSTANCE.get().getDatabaseOwner());
+ } else {
+ if (iEnclosingInstance != null)
+ newEntity = createInstanceNoParameters(c, iEnclosingInstance);
+ else
+ newEntity = createInstanceNoParameters(c, iClass);
+ }
+ } else {
+ if (iEnclosingInstance != null)
+ newEntity = createInstanceNoParameters(c, iEnclosingInstance);
+ else
+ newEntity = createInstanceNoParameters(c, iClass);
+ }
+ ((Proxy) newEntity).setHandler(mi);
+ if (OObjectEntitySerializer.hasBoundedDocumentField(iClass))
+ OObjectEntitySerializer.setFieldValue(OObjectEntitySerializer.getBoundedDocumentField(iClass), newEntity, doc);
+ return newEntity;
+ } catch (InstantiationException ie) {
+ OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), ie);
+ } catch (IllegalAccessException iae) {
+ OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), iae);
+ } catch (IllegalArgumentException iae) {
+ OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), iae);
+ } catch (SecurityException se) {
+ OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), se);
+ } catch (InvocationTargetException ite) {
+ OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), ite);
+ } catch (NoSuchMethodException nsme) {
+ OLogManager.instance().error(this, "Error creating proxied instance for class " + iClass.getName(), nsme);
+ }
+ return null;
+ }
- public static synchronized OObjectEntityEnhancer getInstance() {
- return instance;
- }
+ public static synchronized OObjectEntityEnhancer getInstance() {
+ return instance;
+ }
- private boolean isSetterMethod(String fieldName, Method m) throws SecurityException, NoSuchFieldException {
- if (!fieldName.startsWith("set") || !checkIfFirstCharAfterPrefixIsUpperCase(fieldName, "set"))
- return false;
- if (m.getParameterTypes() != null && m.getParameterTypes().length != 1)
- return false;
- return !OObjectEntitySerializer.isTransientField(m.getDeclaringClass(), getFieldName(m));
- }
+ private boolean isSetterMethod(String fieldName, Method m) throws SecurityException, NoSuchFieldException {
+ if (!fieldName.startsWith("set") || !checkIfFirstCharAfterPrefixIsUpperCase(fieldName, "set"))
+ return false;
+ if (m.getParameterTypes() != null && m.getParameterTypes().length != 1)
+ return false;
+ return !OObjectEntitySerializer.isTransientField(m.getDeclaringClass(), getFieldName(m));
+ }
- private boolean isGetterMethod(String fieldName, Method m) throws SecurityException, NoSuchFieldException {
- int prefixLength;
- if (fieldName.startsWith("get") && checkIfFirstCharAfterPrefixIsUpperCase(fieldName, "get"))
- prefixLength = "get".length();
- else if (fieldName.startsWith("is") && checkIfFirstCharAfterPrefixIsUpperCase(fieldName, "is"))
- prefixLength = "is".length();
- else
- return false;
- if (m.getParameterTypes() != null && m.getParameterTypes().length > 0)
- return false;
- if (fieldName.length() <= prefixLength)
- return false;
- return !OObjectEntitySerializer.isTransientField(m.getDeclaringClass(), getFieldName(m));
- }
+ private boolean isGetterMethod(String fieldName, Method m) throws SecurityException, NoSuchFieldException {
+ int prefixLength;
+ if (fieldName.startsWith("get") && checkIfFirstCharAfterPrefixIsUpperCase(fieldName, "get"))
+ prefixLength = "get".length();
+ else if (fieldName.startsWith("is") && checkIfFirstCharAfterPrefixIsUpperCase(fieldName, "is"))
+ prefixLength = "is".length();
+ else
+ return false;
+ if (m.getParameterTypes() != null && m.getParameterTypes().length > 0)
+ return false;
+ if (fieldName.length() <= prefixLength)
+ return false;
+ return !OObjectEntitySerializer.isTransientField(m.getDeclaringClass(), getFieldName(m));
+ }
- protected String getFieldName(Method m) {
- if (m.getName().startsWith("get"))
- return getFieldName(m.getName(), "get");
- else if (m.getName().startsWith("set"))
- return getFieldName(m.getName(), "set");
- else
- return getFieldName(m.getName(), "is");
- }
+ protected String getFieldName(Method m) {
+ if (m.getName().startsWith("get"))
+ return getFieldName(m.getName(), "get");
+ else if (m.getName().startsWith("set"))
+ return getFieldName(m.getName(), "set");
+ else
+ return getFieldName(m.getName(), "is");
+ }
- protected String getFieldName(String methodName, String prefix) {
- StringBuffer fieldName = new StringBuffer();
- fieldName.append(Character.toLowerCase(methodName.charAt(prefix.length())));
- for (int i = (prefix.length() + 1); i < methodName.length(); i++) {
- fieldName.append(methodName.charAt(i));
- }
- return fieldName.toString();
- }
+ protected String getFieldName(String methodName, String prefix) {
+ StringBuffer fieldName = new StringBuffer();
+ fieldName.append(Character.toLowerCase(methodName.charAt(prefix.length())));
+ for (int i = (prefix.length() + 1); i < methodName.length(); i++) {
+ fieldName.append(methodName.charAt(i));
+ }
+ return fieldName.toString();
+ }
- private boolean checkIfFirstCharAfterPrefixIsUpperCase(String methodName, String prefix) {
- return methodName.length() > prefix.length() ? Character.isUpperCase(methodName.charAt(prefix.length())) : false;
- }
+ private boolean checkIfFirstCharAfterPrefixIsUpperCase(String methodName, String prefix) {
+ return methodName.length() > prefix.length() ? Character.isUpperCase(methodName.charAt(prefix.length())) : false;
+ }
- private boolean isPrimitiveParameterCorrect(Class<?> primitiveClass, Object parameterValue) {
- if (parameterValue == null)
- return false;
- final Class<?> parameterClass = parameterValue.getClass();
- if (Integer.TYPE.isAssignableFrom(primitiveClass))
- return Integer.class.isAssignableFrom(parameterClass);
- else if (Double.TYPE.isAssignableFrom(primitiveClass))
- return Double.class.isAssignableFrom(parameterClass);
- else if (Float.TYPE.isAssignableFrom(primitiveClass))
- return Float.class.isAssignableFrom(parameterClass);
- else if (Long.TYPE.isAssignableFrom(primitiveClass))
- return Long.class.isAssignableFrom(parameterClass);
- else if (Short.TYPE.isAssignableFrom(primitiveClass))
- return Short.class.isAssignableFrom(parameterClass);
- else if (Byte.TYPE.isAssignableFrom(primitiveClass))
- return Byte.class.isAssignableFrom(parameterClass);
- return false;
- }
+ private boolean isPrimitiveParameterCorrect(Class<?> primitiveClass, Object parameterValue) {
+ if (parameterValue == null)
+ return false;
+ final Class<?> parameterClass = parameterValue.getClass();
+ if (Integer.TYPE.isAssignableFrom(primitiveClass))
+ return Integer.class.isAssignableFrom(parameterClass);
+ else if (Double.TYPE.isAssignableFrom(primitiveClass))
+ return Double.class.isAssignableFrom(parameterClass);
+ else if (Float.TYPE.isAssignableFrom(primitiveClass))
+ return Float.class.isAssignableFrom(parameterClass);
+ else if (Long.TYPE.isAssignableFrom(primitiveClass))
+ return Long.class.isAssignableFrom(parameterClass);
+ else if (Short.TYPE.isAssignableFrom(primitiveClass))
+ return Short.class.isAssignableFrom(parameterClass);
+ else if (Byte.TYPE.isAssignableFrom(primitiveClass))
+ return Byte.class.isAssignableFrom(parameterClass);
+ return false;
+ }
- @SuppressWarnings({ "rawtypes", "unchecked" })
- protected void initDocument(Class<?> iClass, Object iInstance, ODocument iDocument, ODatabaseObject db)
- throws IllegalArgumentException, IllegalAccessException {
- for (Class<?> currentClass = iClass; currentClass != Object.class;) {
- for (Field f : currentClass.getDeclaredFields()) {
- if (f.getName().equals("this$0"))
- continue;
- if (!f.isAccessible()) {
- f.setAccessible(true);
- }
- Object o = f.get(iInstance);
- if (o != null) {
- if (OObjectEntitySerializer.isSerializedType(f)) {
- if (o instanceof List<?>) {
- List<?> list = new ArrayList();
- iDocument.field(f.getName(), list);
- o = new OObjectCustomSerializerList(OObjectEntitySerializer.getSerializedType(f), iDocument, list, (List<?>) o);
- f.set(iInstance, o);
- } else if (o instanceof Set<?>) {
- Set<?> set = new HashSet();
- iDocument.field(f.getName(), set);
- o = new OObjectCustomSerializerSet(OObjectEntitySerializer.getSerializedType(f), iDocument, set, (Set<?>) o);
- f.set(iInstance, o);
- } else if (o instanceof Map<?, ?>) {
- Map<?, ?> map = new HashMap();
- iDocument.field(f.getName(), map);
- o = new OObjectCustomSerializerMap(OObjectEntitySerializer.getSerializedType(f), iDocument, map, (Map<?, ?>) o);
- f.set(iInstance, o);
- } else {
- o = OObjectEntitySerializer.serializeFieldValue(o.getClass(), o);
- iDocument.field(f.getName(), o);
- }
- } else {
- iDocument.field(f.getName(), OObjectEntitySerializer.typeToStream(o, OType.getTypeByClass(f.getType()), db, iDocument));
- }
- }
- }
- currentClass = currentClass.getSuperclass();
- }
- }
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ protected void initDocument(Class<?> iClass, Object iInstance, ODocument iDocument, ODatabaseObject db)
+ throws IllegalArgumentException, IllegalAccessException {
+ for (Class<?> currentClass = iClass; currentClass != Object.class;) {
+ for (Field f : currentClass.getDeclaredFields()) {
+ if (f.getName().equals("this$0"))
+ continue;
+ if (!f.isAccessible()) {
+ f.setAccessible(true);
+ }
+ Object o = f.get(iInstance);
+ if (o != null) {
+ if (OObjectEntitySerializer.isSerializedType(f)) {
+ if (o instanceof List<?>) {
+ List<?> list = new ArrayList();
+ iDocument.field(f.getName(), list);
+ o = new OObjectCustomSerializerList(OObjectEntitySerializer.getSerializedType(f), iDocument, list, (List<?>) o);
+ f.set(iInstance, o);
+ } else if (o instanceof Set<?>) {
+ Set<?> set = new HashSet();
+ iDocument.field(f.getName(), set);
+ o = new OObjectCustomSerializerSet(OObjectEntitySerializer.getSerializedType(f), iDocument, set, (Set<?>) o);
+ f.set(iInstance, o);
+ } else if (o instanceof Map<?, ?>) {
+ Map<?, ?> map = new HashMap();
+ iDocument.field(f.getName(), map);
+ o = new OObjectCustomSerializerMap(OObjectEntitySerializer.getSerializedType(f), iDocument, map, (Map<?, ?>) o);
+ f.set(iInstance, o);
+ } else {
+ o = OObjectEntitySerializer.serializeFieldValue(o.getClass(), o);
+ iDocument.field(f.getName(), o);
+ }
+ } else {
+ iDocument.field(f.getName(), OObjectEntitySerializer.typeToStream(o, OType.getTypeByClass(f.getType()), db, iDocument));
+ }
+ }
+ }
+ currentClass = currentClass.getSuperclass();
+ }
+ }
- protected <T> T createInstanceNoParameters(Class<T> iProxiedClass, Class<?> iOriginalClass) throws SecurityException,
- NoSuchMethodException, IllegalArgumentException, InstantiationException, IllegalAccessException, InvocationTargetException {
- T instanceToReturn = null;
- final Class<?> enclosingClass = iOriginalClass.getEnclosingClass();
+ protected <T> T createInstanceNoParameters(Class<T> iProxiedClass, Class<?> iOriginalClass) throws SecurityException,
+ NoSuchMethodException, IllegalArgumentException, InstantiationException, IllegalAccessException, InvocationTargetException {
+ T instanceToReturn = null;
+ final Class<?> enclosingClass = iOriginalClass.getEnclosingClass();
- if (enclosingClass != null) {
- Object instanceOfEnclosingClass = createInstanceNoParameters(enclosingClass, enclosingClass);
+ if (enclosingClass != null) {
+ Object instanceOfEnclosingClass = createInstanceNoParameters(enclosingClass, enclosingClass);
- Constructor<T> ctor = iProxiedClass.getConstructor(enclosingClass);
+ Constructor<T> ctor = iProxiedClass.getConstructor(enclosingClass);
- if (ctor != null) {
- instanceToReturn = ctor.newInstance(instanceOfEnclosingClass);
- }
- } else {
- instanceToReturn = iProxiedClass.newInstance();
- }
+ if (ctor != null) {
+ instanceToReturn = ctor.newInstance(instanceOfEnclosingClass);
+ }
+ } else {
+ try {
+ instanceToReturn = iProxiedClass.newInstance();
+ } catch (InstantiationException e) {
+ OLogManager.instance().error(this, "Cannot create an instance of the enclosing class '%s'", iOriginalClass);
+ throw e;
+ }
+ }
- return instanceToReturn;
+ return instanceToReturn;
- }
+ }
- protected <T> T createInstanceNoParameters(Class<T> iProxiedClass, Object iEnclosingInstance) throws SecurityException,
- NoSuchMethodException, IllegalArgumentException, InstantiationException, IllegalAccessException, InvocationTargetException {
- T instanceToReturn = null;
- final Class<?> enclosingClass = iEnclosingInstance.getClass();
+ protected <T> T createInstanceNoParameters(Class<T> iProxiedClass, Object iEnclosingInstance) throws SecurityException,
+ NoSuchMethodException, IllegalArgumentException, InstantiationException, IllegalAccessException, InvocationTargetException {
+ T instanceToReturn = null;
+ final Class<?> enclosingClass = iEnclosingInstance.getClass();
- if (enclosingClass != null) {
+ if (enclosingClass != null) {
- Constructor<T> ctor = iProxiedClass.getConstructor(enclosingClass);
+ Constructor<T> ctor = iProxiedClass.getConstructor(enclosingClass);
- if (ctor != null) {
- instanceToReturn = ctor.newInstance(iEnclosingInstance);
- }
- } else {
- instanceToReturn = iProxiedClass.newInstance();
- }
+ if (ctor != null) {
+ instanceToReturn = ctor.newInstance(iEnclosingInstance);
+ }
+ } else {
+ instanceToReturn = iProxiedClass.newInstance();
+ }
- return instanceToReturn;
+ return instanceToReturn;
- }
+ }
}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java
index 441c66d4c95..e99182ebd63 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/DictionaryTest.java
@@ -25,12 +25,14 @@
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.impl.ORecordFlat;
import com.orientechnologies.orient.object.db.OObjectDatabaseTx;
-import com.orientechnologies.orient.object.enhancement.ExactEntity;
@Test(groups = "dictionary")
public class DictionaryTest {
private String url;
+ public DictionaryTest() {
+ }
+
@Parameters(value = "url")
public DictionaryTest(String iURL) {
url = iURL;
@@ -129,14 +131,29 @@ public void testDictionaryInTx() throws IOException {
database.close();
}
+ public class ObjectDictionaryTest {
+ private String name;
+
+ public ObjectDictionaryTest() {
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+ }
+
@Test(dependsOnMethods = "testDictionaryMassiveCreate")
public void testDictionaryWithPOJOs() throws IOException {
OObjectDatabaseTx database = new OObjectDatabaseTx(url);
database.open("admin", "admin");
- database.getEntityManager().registerEntityClass(ExactEntity.class);
+ database.getEntityManager().registerEntityClass(ObjectDictionaryTest.class);
Assert.assertNull(database.getDictionary().get("testKey"));
- database.getDictionary().put("testKey", new ExactEntity());
+ database.getDictionary().put("testKey", new ObjectDictionaryTest());
Assert.assertNotNull(database.getDictionary().get("testKey"));
database.close();
|
52cef142154655900ba8669dce631aca7142f9b6
|
ReactiveX-RxJava
|
Confusing Javadoc for `toObservable(Future)` methods
https://github.com/Netflix/RxJava/issues/148
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/Observable.java b/rxjava-core/src/main/java/rx/Observable.java
index 2a48c7eb8e..c8f3f6a758 100644
--- a/rxjava-core/src/main/java/rx/Observable.java
+++ b/rxjava-core/src/main/java/rx/Observable.java
@@ -1976,7 +1976,8 @@ public static <T> Observable<T> toObservable(Iterable<T> iterable) {
*
* Any object that supports the {@link Future} interface can be converted into an Observable that emits
* the return value of the get() method in the object, by passing the object into the <code>toObservable</code> method.
- * The subscribe method on this synchronously so the Subscription returned doesn't nothing.
+ * <p>
+ * This is blocking so the Subscription returned when calling {@link #subscribe(Observer)} does nothing.
*
* @param future
* the source {@link Future}
@@ -1995,7 +1996,8 @@ public static <T> Observable<T> toObservable(Future<T> future) {
* Any object that supports the {@link Future} interface can be converted into an Observable that emits
* the return value of the get() method in the object, by passing the object into the <code>toObservable</code> method.
* The subscribe method on this synchronously so the Subscription returned doesn't nothing.
- * If the future timesout the {@link TimeoutException} exception is passed to the onError.
+ * <p>
+ * This is blocking so the Subscription returned when calling {@link #subscribe(Observer)} does nothing.
*
* @param future
* the source {@link Future}
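
For context, a minimal sketch of the blocking behaviour the reworded Javadoc describes. The subscribeBlocking helper below is a hypothetical stand-in that mimics the documented semantics of Observable.toObservable(Future).subscribe(...); it is not the actual RxJava implementation.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.function.Consumer;

public class BlockingFutureSketch {

    // Stand-in for toObservable(future).subscribe(onNext): subscribing blocks
    // on Future.get() and emits the result, so the Subscription handed back
    // afterwards has nothing left to cancel.
    static <T> void subscribeBlocking(Future<T> future, Consumer<T> onNext) throws Exception {
        onNext.accept(future.get()); // blocks the caller until the Future completes
    }

    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        Future<String> future = pool.submit(() -> "hello");
        subscribeBlocking(future, value -> System.out.println("onNext: " + value));
        pool.shutdown();
    }
}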
|
a2ac32d1d9827f6edf7b72cf47fdbf9023e78ba9
|
intellij-community
|
optimization: cache org.junit.Test
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/java/execution/impl/src/com/intellij/execution/junit/JUnitUtil.java b/java/execution/impl/src/com/intellij/execution/junit/JUnitUtil.java
index 61357de92a8ae..f884997e30386 100644
--- a/java/execution/impl/src/com/intellij/execution/junit/JUnitUtil.java
+++ b/java/execution/impl/src/com/intellij/execution/junit/JUnitUtil.java
@@ -24,6 +24,7 @@
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Condition;
+import com.intellij.openapi.util.Key;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.ClassInheritorsSearch;
@@ -33,6 +34,7 @@
import com.intellij.util.containers.Convertor;
import junit.runner.BaseTestRunner;
import org.jetbrains.annotations.NonNls;
+import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.After;
import org.junit.AfterClass;
@@ -51,19 +53,22 @@ public class JUnitUtil {
@NonNls public static final String RUN_WITH = "org.junit.runner.RunWith";
@NonNls public static final String SUITE_METHOD_NAME = "suite";
- public static boolean isSuiteMethod(final PsiMethod psiMethod) {
- if (psiMethod == null) return false;
+ private static final Key<PsiType> TEST_INTERFACE_KEY = Key.create(TEST_INTERFACE);
+ public static boolean isSuiteMethod(@NotNull PsiMethod psiMethod, @NotNull Project project) {
if (!psiMethod.hasModifierProperty(PsiModifier.PUBLIC)) return false;
if (!psiMethod.hasModifierProperty(PsiModifier.STATIC)) return false;
if (psiMethod.isConstructor()) return false;
final PsiType returnType = psiMethod.getReturnType();
- if (returnType != null) {
- if (!returnType.equalsToText(TEST_INTERFACE) && !returnType.equalsToText(TESTSUITE_CLASS)) {
- final PsiType testType =
- JavaPsiFacade.getInstance(psiMethod.getProject()).getElementFactory().createTypeFromText(TEST_INTERFACE, null);
- if (!TypeConversionUtil.isAssignable(testType, returnType)) {
- return false;
- }
+ if (returnType == null || returnType instanceof PsiPrimitiveType) return false;
+ if (!returnType.equalsToText(TEST_INTERFACE) && !returnType.equalsToText(TESTSUITE_CLASS)) {
+ PsiType cachedTestInterfaceType = project.getUserData(TEST_INTERFACE_KEY);
+ if (cachedTestInterfaceType == null) {
+ final PsiType testType = JavaPsiFacade.getInstance(project).getElementFactory().createTypeFromText(TEST_INTERFACE, null);
+ project.putUserData(TEST_INTERFACE_KEY,testType);
+ cachedTestInterfaceType = testType;
+ }
+ if (!TypeConversionUtil.isAssignable(cachedTestInterfaceType, returnType)) {
+ return false;
}
}
return psiMethod.getParameterList().getParametersCount() == 0;
@@ -109,7 +114,7 @@ public static boolean isTestClass(final PsiClass psiClass) {
return isTestClass(psiClass, true, true);
}
- private static boolean isTestClass(final PsiClass psiClass, boolean checkAbstract, boolean checkForTestCaseInheritance) {
+ private static boolean isTestClass(@NotNull PsiClass psiClass, boolean checkAbstract, boolean checkForTestCaseInheritance) {
if (!PsiClassUtil.isRunnableClass(psiClass, true, checkAbstract)) return false;
if (checkForTestCaseInheritance && isTestCaseInheritor(psiClass)) return true;
final PsiModifierList modifierList = psiClass.getModifierList();
@@ -118,7 +123,7 @@ private static boolean isTestClass(final PsiClass psiClass, boolean checkAbstrac
for (final PsiMethod method : psiClass.getAllMethods()) {
ProgressManager.checkCanceled();
- if (isSuiteMethod(method)) return true;
+ if (isSuiteMethod(method, psiClass.getProject())) return true;
if (isTestAnnotated(method)) return true;
}
@@ -241,7 +246,7 @@ public boolean process(PsiClass psiClass) {
public static PsiMethod findFirstTestMethod(PsiClass clazz) {
PsiMethod testMethod = null;
for (PsiMethod method : clazz.getMethods()) {
- if (isTestMethod(MethodLocation.elementInClass(method, clazz)) || isSuiteMethod(method)) {
+ if (isTestMethod(MethodLocation.elementInClass(method, clazz)) || isSuiteMethod(method, clazz.getProject())) {
testMethod = method;
break;
}
diff --git a/plugins/junit/src/com/intellij/execution/ConfigurationUtil.java b/plugins/junit/src/com/intellij/execution/ConfigurationUtil.java
index 47a46f16712f1..71031a9fdb6f3 100644
--- a/plugins/junit/src/com/intellij/execution/ConfigurationUtil.java
+++ b/plugins/junit/src/com/intellij/execution/ConfigurationUtil.java
@@ -19,6 +19,7 @@
import com.intellij.execution.junit.JUnitUtil;
import com.intellij.execution.junit.TestClassFilter;
import com.intellij.openapi.application.ApplicationManager;
+import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.*;
@@ -38,7 +39,8 @@ public class ConfigurationUtil {
public static boolean findAllTestClasses(final TestClassFilter testClassFilter, final Set<PsiClass> found) {
final PsiManager manager = testClassFilter.getPsiManager();
- GlobalSearchScope projectScopeWithoutLibraries = GlobalSearchScope.projectScope(manager.getProject());
+ final Project project = manager.getProject();
+ GlobalSearchScope projectScopeWithoutLibraries = GlobalSearchScope.projectScope(project);
final GlobalSearchScope scope = projectScopeWithoutLibraries.intersectWith(testClassFilter.getScope());
ClassInheritorsSearch.search(testClassFilter.getBase(), scope, true).forEach(new PsiElementProcessorAdapter<PsiClass>(new PsiElementProcessor<PsiClass>() {
public boolean execute(final PsiClass aClass) {
@@ -51,7 +53,7 @@ public boolean execute(final PsiClass aClass) {
final PsiMethod[] suiteMethods = ApplicationManager.getApplication().runReadAction(
new Computable<PsiMethod[]>() {
public PsiMethod[] compute() {
- return JavaPsiFacade.getInstance(manager.getProject()).getShortNamesCache().getMethodsByName(JUnitUtil.SUITE_METHOD_NAME, scope);
+ return JavaPsiFacade.getInstance(project).getShortNamesCache().getMethodsByName(JUnitUtil.SUITE_METHOD_NAME, scope);
}
}
);
@@ -67,7 +69,7 @@ public PsiClass compute() {
if (containingClass.getContainingClass() != null && !containingClass.hasModifierProperty(PsiModifier.STATIC)) continue;
if (ApplicationManager.getApplication().runReadAction(new Computable<Boolean>() {
public Boolean compute() {
- return JUnitUtil.isSuiteMethod(method);
+ return JUnitUtil.isSuiteMethod(method, project);
}
}).booleanValue()) {
found.add(containingClass);
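
The optimization above resolves the junit.framework.Test type once and stashes it in the Project's user data under a Key so later calls reuse it. A generic sketch of that compute-once, stash-under-a-typed-key pattern follows; the Key and Holder classes are stand-ins, not IntelliJ's actual Key/UserDataHolder API.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;

public class UserDataCacheSketch {

    // Typed key, analogous in spirit to com.intellij.openapi.util.Key<T>.
    static final class Key<T> {
        private final String name;
        Key(String name) { this.name = name; }
        @Override public String toString() { return name; }
    }

    // Minimal user-data holder, analogous to putUserData/getUserData.
    static final class Holder {
        private final Map<Key<?>, Object> data = new ConcurrentHashMap<>();

        @SuppressWarnings("unchecked")
        <T> T getOrCompute(Key<T> key, Supplier<T> compute) {
            // Compute the value only on the first call, reuse it afterwards.
            return (T) data.computeIfAbsent(key, k -> compute.get());
        }
    }

    static final Key<String> TEST_INTERFACE_KEY = new Key<>("junit.framework.Test");

    public static void main(String[] args) {
        Holder project = new Holder();
        String first = project.getOrCompute(TEST_INTERFACE_KEY, () -> "resolved type for junit.framework.Test");
        String second = project.getOrCompute(TEST_INTERFACE_KEY, () -> "never evaluated");
        System.out.println(first.equals(second)); // true: the cached value is reused
    }
}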
|
a0be0f79a696abe57fec91088db97a2dcced676a
|
drools
|
fix failing tests
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieModules.java b/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieModules.java
index 9b5d94e1a1c..a4f45369d1d 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieModules.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/AbstractKieModules.java
@@ -24,15 +24,14 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
-import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Set;
+import static org.kie.builder.impl.KieBuilderImpl.isKieExtension;
+
public abstract class AbstractKieModules
implements
InternalKieModule {
@@ -49,10 +48,10 @@ public abstract class AbstractKieModules
private Map<GAV, InternalKieModule> kieModules;
- private Map<String, InternalKieModule> kJarFromKBaseName = new HashMap<String, InternalKieModule>();
+ private final Map<String, InternalKieModule> kJarFromKBaseName = new HashMap<String, InternalKieModule>();
- private Map<String, KieBaseModel> kBaseModels = new HashMap<String, KieBaseModel>();
- private Map<String, KieSessionModel> kSessionModels = new HashMap<String, KieSessionModel>();
+ private final Map<String, KieBaseModel> kBaseModels = new HashMap<String, KieBaseModel>();
+ private final Map<String, KieSessionModel> kSessionModels = new HashMap<String, KieSessionModel>();
public AbstractKieModules(GAV gav) {
this.gav = gav;
@@ -144,10 +143,7 @@ public static void indexParts(Map<GAV, InternalKieModule> kJars,
public CompositeClassLoader createClassLaoder() {
Map<String, byte[]> classes = new HashMap<String, byte[]>();
- for( Entry<GAV, InternalKieModule> entry : kieModules.entrySet() ) {
- GAV gav = entry.getKey();
- InternalKieModule kModule = entry.getValue();
- List<String> fileNames = new ArrayList<String>();
+ for( InternalKieModule kModule : kieModules.values() ) {
for( String fileName : kModule.getFileNames() ) {
if ( fileName.endsWith( ".class" ) ) {
classes.put( fileName, kModule.getBytes( fileName ) );
@@ -224,16 +220,10 @@ public static void addFiles(CompositeKnowledgeBuilder ckbuilder,
String prefixPath = kieBaseModel.getName().replace( '.',
'/' );
for ( String fileName : kieModule.getFileNames() ) {
- if ( fileName.startsWith( prefixPath ) ) {
- String upperCharName = fileName.toUpperCase();
-
- if ( upperCharName.endsWith( "DRL" ) ) {
- ckbuilder.add( ResourceFactory.newByteArrayResource( kieModule.getBytes( fileName ) ),
- ResourceType.DRL );
- fileCount++;
- } else if ( upperCharName.endsWith( "BPMN2" ) ) {
+ if ( ((KieBaseModelImpl)kieBaseModel).isDefault() || fileName.startsWith( prefixPath ) ) {
+ if ( isKieExtension(fileName) ) {
ckbuilder.add( ResourceFactory.newByteArrayResource( kieModule.getBytes( fileName ) ),
- ResourceType.DRL );
+ ResourceType.determineResourceType( fileName ) );
fileCount++;
}
}
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieBuilderImpl.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieBuilderImpl.java
index 76f10e51f58..c46fd856478 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieBuilderImpl.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieBuilderImpl.java
@@ -7,7 +7,6 @@
import org.drools.commons.jci.readers.DiskResourceReader;
import org.drools.commons.jci.readers.ResourceReader;
import org.drools.compiler.io.memory.MemoryFileSystem;
-import org.drools.core.util.ClassUtils;
import org.drools.core.util.StringUtils;
import org.drools.kproject.GAVImpl;
import org.drools.kproject.models.KieBaseModelImpl;
@@ -27,8 +26,6 @@
import org.kie.builder.Message.Level;
import org.kie.builder.ResourceType;
import org.kie.builder.Results;
-import org.kie.util.ClassLoaderUtil;
-import org.kie.util.CompositeClassLoader;
import java.io.ByteArrayInputStream;
import java.io.File;
@@ -46,6 +43,8 @@ public class KieBuilderImpl
implements
KieBuilder {
+ private static final String RESOURCES_ROOT = "src/main/resources/";
+
private Messages messages;
private final ResourceReader srcMfs;
@@ -106,7 +105,7 @@ public Results build() {
trgMfs = new MemoryFileSystem();
writePomAndKProject();
- ClassLoader classLoader = compileJavaClasses();
+ compileJavaClasses();
addKBasesFilesToTrg();
kieModule = new MemoryKieModules( gav,
@@ -149,12 +148,11 @@ private KnowledgeBaseConfiguration getKnowledgeBaseConfiguration(KieBaseModel ki
}
private void addKBaseFilesToTrg(KieBaseModel kieBase) {
- String resourcesRoot = "src/main/resources/";
for ( String fileName : srcMfs.getFileNames() ) {
if ( filterFileInKBase( kieBase,
fileName ) ) {
byte[] bytes = srcMfs.getBytes( fileName );
- trgMfs.write( fileName.substring( resourcesRoot.length() - 1 ),
+ trgMfs.write( fileName.substring( RESOURCES_ROOT.length() - 1 ),
bytes,
true );
}
@@ -162,11 +160,10 @@ private void addKBaseFilesToTrg(KieBaseModel kieBase) {
}
private void addMetaInfBuilder() {
- String resourcesRoot = "src/main/resources/";
for ( String fileName : srcMfs.getFileNames() ) {
- if ( fileName.startsWith( resourcesRoot ) ) {
+ if ( fileName.startsWith( RESOURCES_ROOT ) && !isKieExtension(fileName) ) {
byte[] bytes = srcMfs.getBytes( fileName );
- trgMfs.write( fileName.substring( resourcesRoot.length() - 1 ),
+ trgMfs.write( fileName.substring( RESOURCES_ROOT.length() - 1 ),
bytes,
true );
}
@@ -198,10 +195,10 @@ private boolean isFileInKiePackage(String fileName,
String pkgName) {
String pathName = pkgName.replace( '.',
'/' );
- return (fileName.startsWith( "src/main/resources/" + pathName + "/" ) || fileName.contains( "/" + pathName + "/" ));
+ return (fileName.startsWith( RESOURCES_ROOT + pathName + "/" ) || fileName.contains( "/" + pathName + "/" ));
}
- private boolean isKieExtension(String fileName) {
+ static boolean isKieExtension(String fileName) {
return fileName.endsWith( ResourceType.DRL.getDefaultExtension() ) ||
fileName.endsWith( ResourceType.BPMN2.getDefaultExtension() );
}
@@ -362,7 +359,7 @@ public static String generatePomProperties(GAV gav) {
return sBuilder.toString();
}
- private ClassLoader compileJavaClasses() {
+ private void compileJavaClasses() {
List<String> classFiles = new ArrayList<String>();
for ( String fileName : srcMfs.getFileNames() ) {
if ( fileName.endsWith( ".class" ) ) {
@@ -382,7 +379,7 @@ private ClassLoader compileJavaClasses() {
}
}
if ( javaFiles.isEmpty() ) {
- return getCompositeClassLoader();
+ return;
}
String[] sourceFiles = javaFiles.toArray( new String[javaFiles.size()] );
@@ -398,8 +395,6 @@ private ClassLoader compileJavaClasses() {
for ( CompilationProblem problem : res.getWarnings() ) {
messages.addMessage( problem );
}
-
- return res.getErrors().length == 0 ? getCompositeClassLoader() : getClass().getClassLoader();
}
public static String findPomProperties(ZipFile zipFile) {
@@ -434,15 +429,6 @@ public static File recurseToPomProperties(File file) {
return null;
}
- private CompositeClassLoader getCompositeClassLoader() {
- CompositeClassLoader ccl = ClassLoaderUtil.getClassLoader( null,
- getClass(),
- true );
- ccl.addClassLoader( new ClassUtils.MapClassLoader( trgMfs.getMap(),
- ccl ) );
- return ccl;
- }
-
private EclipseJavaCompiler createCompiler(String prefix) {
EclipseJavaCompilerSettings settings = new EclipseJavaCompilerSettings();
settings.setSourceVersion( "1.5" );
@@ -450,5 +436,4 @@ private EclipseJavaCompiler createCompiler(String prefix) {
return new EclipseJavaCompiler( settings,
prefix );
}
-
}
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieContainerImpl.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieContainerImpl.java
index 0d9f13920bb..a0dd2000029 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieContainerImpl.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieContainerImpl.java
@@ -68,13 +68,11 @@ public KieBase getKieBase(String kBaseName) {
}
public KieSession getKieSession() {
- // @TODO
- throw new UnsupportedOperationException( "This method is still to be implemented" );
+ return getKieBase().newKieSession();
}
public StatelessKieSession getKieStatelessSession() {
- // @TODO
- throw new UnsupportedOperationException( "This method is still to be implemented" );
+ return getKieBase().newStatelessKieSession();
}
public KieSession getKieSession(String kSessionName) {
diff --git a/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java b/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java
index 2f1a8567e2d..887e9dac3db 100644
--- a/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java
+++ b/drools-compiler/src/main/java/org/kie/builder/impl/KieServicesImpl.java
@@ -1,9 +1,5 @@
package org.kie.builder.impl;
-import static org.drools.compiler.io.memory.MemoryFileSystem.readFromJar;
-
-import java.io.File;
-
import org.drools.audit.KnowledgeRuntimeLoggerProviderImpl;
import org.drools.command.impl.CommandFactoryServiceImpl;
import org.drools.concurrent.ExecutorProviderImpl;
@@ -25,6 +21,10 @@
import org.kie.persistence.jpa.KieStoreServices;
import org.kie.util.ServiceRegistryImpl;
+import java.io.File;
+
+import static org.drools.compiler.io.memory.MemoryFileSystem.readFromJar;
+
public class KieServicesImpl implements KieServices {
private ResourceFactoryService resourceFactory;
@@ -43,7 +43,7 @@ public KieRepository getKieRepository() {
* Returns KieContainer for the classpath
*/
public KieContainer getKieContainer() {
- return new KieContainerImpl( new ClasspathKieProject( getKieRepository() ), getKieRepository() );
+ return getKieContainer(getKieRepository().getDefaultGAV());
}
public KieContainer getKieContainer(GAV gav) {
diff --git a/drools-compiler/src/test/java/org/drools/builder/KieBuilderTest.java b/drools-compiler/src/test/java/org/drools/builder/KieBuilderTest.java
index 7b5c29e7262..f79bae34a1b 100644
--- a/drools-compiler/src/test/java/org/drools/builder/KieBuilderTest.java
+++ b/drools-compiler/src/test/java/org/drools/builder/KieBuilderTest.java
@@ -5,7 +5,6 @@
import org.drools.core.util.FileManager;
import org.drools.kproject.GAVImpl;
import org.drools.kproject.models.KieBaseModelImpl;
-import org.drools.kproject.models.KieModuleModelImpl;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -33,9 +32,9 @@
import java.util.Arrays;
import java.util.List;
-import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class KieBuilderTest {
@@ -100,14 +99,14 @@ public void testKieModuleDepednencies() throws ClassNotFoundException, Interrupt
fail("Unable to build KieJar\n" + kb1.getResults( ).toString() );
}
KieRepository kr = ks.getKieRepository();
- KieModule kModule1 = kr.getKieModule( gav1 );
+ KieModule kModule1 = kr.getKieModule(gav1);
assertNotNull( kModule1 );
String namespace2 = "org.kie.test2";
GAV gav2 = KieFactory.Factory.get().newGav( namespace2, "memory", "1.0-SNAPSHOT" );
KieModuleModel kProj2 = createKieProject(namespace2);
- KieBaseModelImpl kieBase2 = ( KieBaseModelImpl ) ((KieModuleModelImpl)kProj2).getKieBaseModels().get( namespace2 );
+ KieBaseModelImpl kieBase2 = ( KieBaseModelImpl ) kProj2.getKieBaseModels().get( namespace2 );
kieBase2.addInclude( namespace1 );
KieFileSystem kfs2 = KieFactory.Factory.get().newKieFileSystem();
@@ -120,7 +119,7 @@ public void testKieModuleDepednencies() throws ClassNotFoundException, Interrupt
if ( kb2.hasResults( Level.ERROR ) ) {
fail("Unable to build KieJar\n" + kb2.getResults( ).toString() );
}
- KieModule kModule2= kr.getKieModule( gav2 );
+ KieModule kModule2= kr.getKieModule(gav2);
assertNotNull( kModule2);
KieContainer kContainer = ks.getKieContainer( gav2 );
@@ -132,8 +131,13 @@ public void testKieModuleDepednencies() throws ClassNotFoundException, Interrupt
kSession.fireAllRules();
assertEquals( 2, list.size() );
- assertEquals( "org.kie.test.Message", list.get(0).getClass().getName() );
- }
+ if ("org.kie.test1.Message".equals(list.get(0).getClass().getName())) {
+ assertEquals( "org.kie.test2.Message", list.get(1).getClass().getName() );
+ } else {
+ assertEquals( "org.kie.test2.Message", list.get(0).getClass().getName() );
+ assertEquals( "org.kie.test1.Message", list.get(1).getClass().getName() );
+ }
+ }
@Test
public void testNoPomXml() throws ClassNotFoundException, InterruptedException, IOException {
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java
index dda33523c8b..85bd6827967 100644
--- a/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java
+++ b/drools-compiler/src/test/java/org/drools/integrationtests/KieHelloWorldTest.java
@@ -32,7 +32,7 @@ public void testHelloWorld() throws Exception {
KieServices ks = KieServices.Factory.get();
KieFactory kf = KieFactory.Factory.get();
- KieFileSystem kfs = kf.newKieFileSystem().write( "r1.drl", drl );
+ KieFileSystem kfs = kf.newKieFileSystem().write( "src/main/resources/r1.drl", drl );
ks.newKieBuilder( kfs ).build();
KieSession ksession = ks.getKieContainer().getKieSession();
@@ -54,7 +54,7 @@ public void testFailingHelloWorld() throws Exception {
KieServices ks = KieServices.Factory.get();
KieFactory kf = KieFactory.Factory.get();
- KieFileSystem kfs = kf.newKieFileSystem().write( "r1.drl", drl );
+ KieFileSystem kfs = kf.newKieFileSystem().write( "src/main/resources/r1.drl", drl );
Results results = ks.newKieBuilder( kfs ).build();
assertEquals( 1, results.getInsertedMessages().size() );
@@ -81,8 +81,8 @@ public void testHelloWorldWithPackages() throws Exception {
KieFileSystem kfs = kf.newKieFileSystem()
.generateAndWritePomXML( gav )
- .write("src/main/resoureces/org/pkg1/r1.drl", drl1)
- .write("src/main/resoureces/org/pkg2/r2.drl", drl2)
+ .write("src/main/resources/KBase1/org/pkg1/r1.drl", drl1)
+ .write("src/main/resources/KBase1/org/pkg2/r2.drl", drl2)
.writeProjectXML( createKieProjectWithPackages(kf).toXML());
ks.newKieBuilder( kfs ).build();
diff --git a/drools-core/src/main/java/org/drools/world/impl/WorldImpl.java b/drools-core/src/main/java/org/drools/world/impl/WorldImpl.java
index 53d4769979c..2f868da93c4 100644
--- a/drools-core/src/main/java/org/drools/world/impl/WorldImpl.java
+++ b/drools-core/src/main/java/org/drools/world/impl/WorldImpl.java
@@ -16,28 +16,16 @@
package org.drools.world.impl;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.PriorityQueue;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-
import org.drools.command.GetDefaultValue;
-import org.drools.command.NewStatefulKnowledgeSessionCommand;
-import org.drools.command.ResolvingKnowledgeCommandContext;
import org.drools.command.impl.ContextImpl;
import org.drools.command.impl.GenericCommand;
-import org.drools.time.SessionPseudoClock;
import org.kie.command.Command;
import org.kie.command.Context;
import org.kie.command.World;
import org.kie.runtime.CommandExecutor;
-import org.kie.runtime.StatefulKnowledgeSession;
-import org.kie.simulation.Simulation;
-import org.kie.simulation.SimulationPath;
-import org.kie.simulation.SimulationStep;
+
+import java.util.HashMap;
+import java.util.Map;
public class WorldImpl
implements World, GetDefaultValue, CommandExecutor {
diff --git a/drools-maven-plugin/src/test/java/org/drools/BuildMojoTest.java b/drools-maven-plugin/src/test/java/org/drools/BuildMojoTest.java
index 4ec028d0dc4..13d75aca5c3 100644
--- a/drools-maven-plugin/src/test/java/org/drools/BuildMojoTest.java
+++ b/drools-maven-plugin/src/test/java/org/drools/BuildMojoTest.java
@@ -1,13 +1,14 @@
package org.drools;
-import java.io.File;
-
import org.apache.maven.plugin.testing.AbstractMojoTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
+import java.io.File;
+
+@Ignore
public class BuildMojoTest extends AbstractMojoTestCase {
@Before
@@ -20,7 +21,7 @@ protected void tearDown() throws Exception {
super.tearDown();
}
- @Test @Ignore
+ @Test
public void testSomething()
throws Exception
{
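
Condensed usage sketch adapted from the KieHelloWorldTest hunk above, assuming the pre-release 6.0 API shown there (the import package names are guessed from the surrounding hunks and may differ). It highlights the two behavioural fixes: rules must be written under src/main/resources/, and the default container now returns a working KieSession instead of throwing UnsupportedOperationException.

// Import paths are assumptions based on the pre-release API; adjust as needed.
import org.kie.KieServices;
import org.kie.builder.KieFactory;
import org.kie.builder.KieFileSystem;
import org.kie.runtime.KieSession;

public class HelloWorldSketch {
    public static void main(String[] args) {
        String drl = "package org.sample\n" +
                     "rule R1 when then System.out.println(\"hello\"); end\n";

        KieServices ks = KieServices.Factory.get();
        KieFactory kf = KieFactory.Factory.get();

        // The fixed tests write rules under src/main/resources/ so the builder picks them up.
        KieFileSystem kfs = kf.newKieFileSystem().write("src/main/resources/r1.drl", drl);
        ks.newKieBuilder(kfs).build();

        // getKieSession() is now implemented as getKieBase().newKieSession().
        KieSession ksession = ks.getKieContainer().getKieSession();
        ksession.fireAllRules();
    }
}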
|
b9cab4ee5e2cc7e5cfde59a6815d87a209a65f3a
|
restlet-framework-java
|
1.0 beta 9 (not released yet)
- Merged Restlet and UniformInterface interfaces into a single Restlet interface for simplification purposes.
- Moved connectors handling from OriginServer up to Component, as other components like Proxies need them.
- Renamed UniformCall and related methods into RestletCall.
- Renamed RestletCall.getHandler*() methods into getRestlet*().
- Updated Jetty 6 to version 6.0 beta 14.
- Fixed issue with ServletCall.getRequestUri returning a trailing '?'. Reported by Yuri de Wit.
- Renamed NRE implementation classes HandlerTarget and HandlerMapping to RestletTarget and RestletMapping.
- Added a new extension to NRE to support Apache Commons FileUpload, to facilitate the processing of multipart forms. Suggested by Yuri de Wit.
- Removed RestletContainer and RestletServer interfaces.
- Renamed DefaultRestletContainer and DefaultRestletServer to RestletContainer and RestletServer for simplification purposes.
- The parent of all Restlets is now a simple Component, not necessarily a RestletContainer.
- Factory and Manager were refactored for clarification and simplification purposes.
- Added getInitParameters() to Component to allow external code to pass init params in a standard way (for example a main method or the ServerServlet).
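
A minimal sketch of the new parenting rule ("the parent of all Restlets is now a simple Component"), using the constructor shape visible in the diff that follows; the HelloRestlet class itself is hypothetical and not part of the commit.

import org.restlet.AbstractRestlet;
import org.restlet.RestletCall;
import org.restlet.component.Component;

public class HelloRestlet extends AbstractRestlet {
    public HelloRestlet(Component parent) {
        super(parent); // any Component may now own the Restlet, not only a RestletContainer
    }

    public void handle(RestletCall call) {
        // handle the call here
    }
}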
|
p
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/source/main/com/noelios/restlet/DirectoryRestlet.java b/source/main/com/noelios/restlet/DirectoryRestlet.java
index afb5a62daa..fe7d0c7652 100644
--- a/source/main/com/noelios/restlet/DirectoryRestlet.java
+++ b/source/main/com/noelios/restlet/DirectoryRestlet.java
@@ -27,7 +27,7 @@
import org.restlet.AbstractRestlet;
import org.restlet.RestletCall;
-import org.restlet.component.RestletContainer;
+import org.restlet.component.Component;
import org.restlet.data.Encoding;
import org.restlet.data.Encodings;
import org.restlet.data.Language;
@@ -74,14 +74,14 @@ public class DirectoryRestlet extends AbstractRestlet
/**
* Constructor.
- * @param container The parent container.
+ * @param parent The parent component.
* @param rootPath The directory's root path.
* @param deeply Indicates if the sub-directories are deeply accessible.
* @param indexName If no file name is specified, use the (optional) index name.
*/
- public DirectoryRestlet(RestletContainer container, String rootPath, boolean deeply, String indexName)
+ public DirectoryRestlet(Component parent, String rootPath, boolean deeply, String indexName)
{
- super(container);
+ super(parent);
this.rootPath = StringUtils.normalizePath(rootPath);
this.deeply = deeply;
this.defaultEncoding = Encodings.IDENTITY;
diff --git a/source/main/com/noelios/restlet/FileRestlet.java b/source/main/com/noelios/restlet/FileRestlet.java
index 69e984b19d..11177dc957 100644
--- a/source/main/com/noelios/restlet/FileRestlet.java
+++ b/source/main/com/noelios/restlet/FileRestlet.java
@@ -24,7 +24,7 @@
import org.restlet.AbstractRestlet;
import org.restlet.RestletCall;
-import org.restlet.component.RestletContainer;
+import org.restlet.component.Component;
import org.restlet.data.MediaType;
import org.restlet.data.Statuses;
@@ -46,13 +46,13 @@ public class FileRestlet extends AbstractRestlet
/**
* Constructor.
- * @param container The parent container.
+ * @param parent The parent component.
* @param filePath The file's path.
* @param mediaType The file's media type.
*/
- public FileRestlet(RestletContainer container, String filePath, MediaType mediaType)
+ public FileRestlet(Component parent, String filePath, MediaType mediaType)
{
- super(container);
+ super(parent);
this.filePath = filePath;
this.mediaType = mediaType;
this.timeToLive = 600;
diff --git a/source/main/com/noelios/restlet/GuardChainlet.java b/source/main/com/noelios/restlet/GuardChainlet.java
index e0f6576ac4..bb0821eee8 100644
--- a/source/main/com/noelios/restlet/GuardChainlet.java
+++ b/source/main/com/noelios/restlet/GuardChainlet.java
@@ -28,7 +28,7 @@
import org.restlet.AbstractChainlet;
import org.restlet.RestletCall;
-import org.restlet.component.RestletContainer;
+import org.restlet.component.Component;
import org.restlet.data.ChallengeResponse;
import org.restlet.data.ChallengeScheme;
import org.restlet.data.ChallengeSchemes;
@@ -63,16 +63,16 @@ public abstract class GuardChainlet extends AbstractChainlet
/**
* Constructor.
* If the authentication is not requested, the scheme and realm parameters are not necessary (pass null instead).
- * @param container The parent container.
+ * @param parent The parent component.
* @param logName The log name to used in the logging.properties file.
* @param authentication Indicates if the guard should attempt to authenticate the caller.
* @param scheme The authentication scheme to use.
* @param realm The authentication realm.
* @param authorization Indicates if the guard should attempt to authorize the caller.
*/
- public GuardChainlet(RestletContainer container, String logName, boolean authentication, ChallengeScheme scheme, String realm, boolean authorization)
+ public GuardChainlet(Component parent, String logName, boolean authentication, ChallengeScheme scheme, String realm, boolean authorization)
{
- super(container);
+ super(parent);
this.logger = Logger.getLogger(logName);
this.authentication = authentication;
diff --git a/source/main/com/noelios/restlet/LogChainlet.java b/source/main/com/noelios/restlet/LogChainlet.java
index 2f2e8efc45..8b1000f653 100644
--- a/source/main/com/noelios/restlet/LogChainlet.java
+++ b/source/main/com/noelios/restlet/LogChainlet.java
@@ -27,7 +27,7 @@
import org.restlet.AbstractChainlet;
import org.restlet.RestletCall;
-import org.restlet.component.RestletContainer;
+import org.restlet.component.Component;
import com.noelios.restlet.util.StringTemplate;
import com.noelios.restlet.util.UniformCallModel;
@@ -49,27 +49,27 @@ public class LogChainlet extends AbstractChainlet
/**
* Constructor using the default format.<br/>
* Default format using <a href="http://analog.cx/docs/logfmt.html">Analog syntax</a>: %Y-%m-%d\t%h:%n:%j\t%j\t%r\t%u\t%s\t%j\t%B\t%f\t%c\t%b\t%q\t%v\t%T
- * @param container The parent container.
+ * @param parent The parent component.
* @param logName The log name to used in the logging.properties file.
*/
- public LogChainlet(RestletContainer container, String logName)
+ public LogChainlet(Component parent, String logName)
{
- super(container);
+ super(parent);
this.logger = Logger.getLogger(logName);
this.logTemplate = null;
}
/**
* Constructor.
- * @param container The parent container.
+ * @param parent The parent component.
* @param logName The log name to used in the logging.properties file.
* @param logFormat The log format to use.
* @see com.noelios.restlet.util.UniformCallModel
* @see com.noelios.restlet.util.StringTemplate
*/
- public LogChainlet(RestletContainer container, String logName, String logFormat)
+ public LogChainlet(Component parent, String logName, String logFormat)
{
- super(container);
+ super(parent);
this.logger = Logger.getLogger(logName);
this.logTemplate = new StringTemplate(logFormat);
}
diff --git a/source/main/com/noelios/restlet/RedirectRestlet.java b/source/main/com/noelios/restlet/RedirectRestlet.java
index 741196ac9c..db890bf451 100644
--- a/source/main/com/noelios/restlet/RedirectRestlet.java
+++ b/source/main/com/noelios/restlet/RedirectRestlet.java
@@ -29,7 +29,7 @@
import org.restlet.AbstractRestlet;
import org.restlet.Manager;
import org.restlet.RestletCall;
-import org.restlet.component.RestletContainer;
+import org.restlet.component.Component;
import org.restlet.data.Reference;
import org.restlet.data.Statuses;
@@ -71,11 +71,11 @@ public class RedirectRestlet extends AbstractRestlet
public static final int MODE_CONNECTOR = 4;
/**
- * In this mode, the call is internally redirected within the current Restlet container. This is useful when
- * there are multiple ways to access to the same ressources.<br/>
+ * In this mode, the call is internally redirected within the parent component. This is useful when
+ * there are multiple ways to access to the same resource.<br/>
* Be careful when specifying the target pattern or infinite loops may occur.
*/
- public static final int MODE_CONTAINER = 5;
+ public static final int MODE_INTERNAL = 5;
/** Obtain a suitable logger. */
private static Logger logger = Logger.getLogger("com.noelios.restlet.RedirectRestlet");
@@ -91,13 +91,13 @@ public class RedirectRestlet extends AbstractRestlet
/**
* Constructor.
- * @param container The parent container.
+ * @param parent The parent component.
* @param targetPattern The pattern to build the target URI.
* @param mode The redirection mode.
*/
- public RedirectRestlet(RestletContainer container, String targetPattern, int mode)
+ public RedirectRestlet(Component parent, String targetPattern, int mode)
{
- super(container);
+ super(parent);
this.targetPattern = targetPattern;
this.mode = mode;
}
@@ -152,8 +152,8 @@ public void handle(RestletCall call)
getParent().callClient(this.connectorName, call);
break;
- case MODE_CONTAINER:
- logger.log(Level.INFO, "Redirecting to container: " + targetUri);
+ case MODE_INTERNAL:
+ logger.log(Level.INFO, "Redirecting internally: " + targetUri);
call.setResourceRef(target);
getParent().handle(call);
break;
diff --git a/source/main/com/noelios/restlet/StatusChainlet.java b/source/main/com/noelios/restlet/StatusChainlet.java
index a269c96632..70bb9cc62c 100644
--- a/source/main/com/noelios/restlet/StatusChainlet.java
+++ b/source/main/com/noelios/restlet/StatusChainlet.java
@@ -27,7 +27,7 @@
import org.restlet.AbstractChainlet;
import org.restlet.RestletCall;
-import org.restlet.component.RestletContainer;
+import org.restlet.component.Component;
import org.restlet.data.MediaTypes;
import org.restlet.data.Representation;
import org.restlet.data.Status;
@@ -61,14 +61,14 @@ public class StatusChainlet extends AbstractChainlet
/**
* Constructor.
- * @param container The parent container.
+ * @param parent The parent component.
* @param overwrite Indicates whether an existing representation should be overwritten.
* @param email Email address of the administrator to contact in case of error.
* @param homeURI The home URI to display in case the user got a "not found" exception.
*/
- public StatusChainlet(RestletContainer container, boolean overwrite, String email, String homeURI)
+ public StatusChainlet(Component parent, boolean overwrite, String email, String homeURI)
{
- super(container);
+ super(parent);
this.overwrite = overwrite;
this.email = email;
this.homeURI = homeURI;
diff --git a/source/main/com/noelios/restlet/impl/ChainletImpl.java b/source/main/com/noelios/restlet/impl/ChainletImpl.java
index 46cadc2cdc..cb75f26399 100644
--- a/source/main/com/noelios/restlet/impl/ChainletImpl.java
+++ b/source/main/com/noelios/restlet/impl/ChainletImpl.java
@@ -26,10 +26,9 @@
import org.restlet.Chainlet;
import org.restlet.RestletCall;
import org.restlet.Restlet;
-import org.restlet.component.RestletContainer;
+import org.restlet.component.Component;
import org.restlet.data.Statuses;
-
/**
* Implementation of a chainer of calls to a target Restlet.
*/
@@ -43,11 +42,11 @@ public class ChainletImpl extends AbstractRestlet implements Chainlet
/**
* Constructor.
- * @param container The Restlet container.
+ * @param parent The parent component.
*/
- public ChainletImpl(RestletContainer container)
+ public ChainletImpl(Component parent)
{
- super(container);
+ super(parent);
}
/**
diff --git a/source/main/com/noelios/restlet/impl/FactoryImpl.java b/source/main/com/noelios/restlet/impl/FactoryImpl.java
index d7ab5f7f06..2a6a67e7a7 100644
--- a/source/main/com/noelios/restlet/impl/FactoryImpl.java
+++ b/source/main/com/noelios/restlet/impl/FactoryImpl.java
@@ -41,8 +41,6 @@
import org.restlet.RestletCall;
import org.restlet.Restlet;
import org.restlet.component.Component;
-import org.restlet.component.RestletContainer;
-import org.restlet.component.RestletServer;
import org.restlet.connector.Client;
import org.restlet.connector.Server;
import org.restlet.data.*;
@@ -209,16 +207,6 @@ public RestletCall createCall()
return new RestletCallImpl();
}
- /**
- * Creates a delegate Chainlet.
- * @param container The Restlet container.
- * @return A new Chainlet.
- */
- public Chainlet createChainlet(RestletContainer container)
- {
- return new ChainletImpl(container);
- }
-
/**
* Creates a challenge response for a specific scheme (ex: HTTP BASIC authentication)
* using a login and a password as the credentials.
@@ -311,6 +299,28 @@ public CookieSetting createCookieSetting(String name, String value)
return new CookieSettingImpl(name, value);
}
+ /**
+ * Creates a delegate Chainlet for internal usage by the AbstractChainlet.<br/>
+ * If you need a Chainlet for your application, you should be subclassing the AbstractChainlet instead.
+ * @param parent The parent component.
+ * @return A new Chainlet.
+ */
+ public Chainlet createDelegateChainlet(Component parent)
+ {
+ return new ChainletImpl(parent);
+ }
+
+ /**
+ * Creates a delegate Maplet for internal usage by the DefaultMaplet.<br/>
+ * If you need a Maplet for your application, you should be using the DefaultMaplet instead.
+ * @param parent The parent component.
+ * @return A new Maplet.
+ */
+ public Maplet createDelegateMaplet(Component parent)
+ {
+ return new MapletImpl(parent);
+ }
+
/**
* Creates a new encoding from its standard name.
* @param name The standard encoding name.
@@ -340,16 +350,6 @@ public Language createLanguage(String name)
return (name == null) ? null : new LanguageImpl(name);
}
- /**
- * Creates a delegate Maplet.
- * @param container The Restlet container.
- * @return A new Maplet.
- */
- public Maplet createRestlet(RestletContainer container)
- {
- return new MapletImpl(container);
- }
-
/**
* Creates a new media type from its standard name.
* @param name The standard media type name.
@@ -400,28 +400,6 @@ public RepresentationMetadata createRepresentationMetadata(MediaType mediaType)
return new DefaultRepresentationMetadata(mediaType);
}
- /**
- * Creates a delegate Restlet container.
- * @param parent The parent component.
- * @param name The container's name.
- * @return The new Restlet container.
- */
- public RestletContainer createRestletContainer(Component parent, String name)
- {
- return new RestletContainerImpl(parent, name);
- }
-
- /**
- * Creates a delegate Restlet server.
- * @param parent The parent component.
- * @param name The server's name.
- * @return The new Restlet server.
- */
- public RestletServer createRestletServer(Component parent, String name)
- {
- return new RestletServerImpl(name);
- }
-
/**
* Create a new server connector for a given protocol.
* @param protocol The connector protocol.
diff --git a/source/main/com/noelios/restlet/impl/MapletImpl.java b/source/main/com/noelios/restlet/impl/MapletImpl.java
index 9efa8d98ad..e0f100edba 100644
--- a/source/main/com/noelios/restlet/impl/MapletImpl.java
+++ b/source/main/com/noelios/restlet/impl/MapletImpl.java
@@ -154,7 +154,7 @@ public boolean delegate(RestletCall call)
if(found)
{
// Updates the paths
- String oldHandlerPath = call.getHandlerPath();
+ String oldHandlerPath = call.getRestletPath();
String handlerPath = resourcePath.substring(0, matcher.end());
if(oldHandlerPath == null)
@@ -167,10 +167,10 @@ public boolean delegate(RestletCall call)
}
// Updates the matches
- call.getHandlerMatches().clear();
+ call.getRestletMatches().clear();
for(int i = 0; i < matcher.groupCount(); i++)
{
- call.getHandlerMatches().add(matcher.group(i + 1));
+ call.getRestletMatches().add(matcher.group(i + 1));
}
// Invoke the call handler
diff --git a/source/main/com/noelios/restlet/impl/RestletCallImpl.java b/source/main/com/noelios/restlet/impl/RestletCallImpl.java
index 63a27a6b2e..02de3e3106 100644
--- a/source/main/com/noelios/restlet/impl/RestletCallImpl.java
+++ b/source/main/com/noelios/restlet/impl/RestletCallImpl.java
@@ -83,11 +83,11 @@ public class RestletCallImpl implements RestletCall
/** The cookies to set in the client. */
protected List<CookieSetting> cookieSettings;
- /** The list of substrings matched in the handler path. */
- protected List<String> handlerMatches;
+ /** The list of substrings matched in the Restlet path. */
+ protected List<String> restletMatches;
- /** The handler path. */
- protected String handlerPath;
+ /** The Restlet path. */
+ protected String restletPath;
/** The representation provided by the client. */
protected Representation input;
@@ -452,35 +452,6 @@ public List<CookieSetting> getCookieSettings()
return this.cookieSettings;
}
- /**
- * Returns the list of substrings matched in the current handler path.
- * @return The list of substrings matched.
- * @see <a href="http://java.sun.com/j2se/1.5.0/docs/api/java/util/regex/Matcher.html#group(int)">Matcher.group()</a>
- */
- public List<String> getHandlerMatches()
- {
- if(this.handlerMatches == null) this.handlerMatches = new ArrayList<String>();
- return this.handlerMatches;
- }
-
- /**
- * Returns the part of the resource reference preceeding the resource path.
- * @return The part of the resource reference preceeding the resource path.
- */
- public String getHandlerPath()
- {
- return this.handlerPath;
- }
-
- /**
- * Returns the handler path as a reference.
- * @return The handler path as a reference.
- */
- public Reference getHandlerRef()
- {
- return new ReferenceImpl(getHandlerPath());
- }
-
/**
* Returns the representation provided by the client.
* @return The representation provided by the client.
@@ -555,24 +526,24 @@ public Reference getReferrerRef()
}
/**
- * Returns the part of the resource reference following the handler path.
- * @return The part of the resource reference following the handler path.
+ * Returns the relative resource path, following the absolute Restlet path in the resource reference.
+ * @return The relative resource path.
*/
public String getResourcePath()
{
- if(getHandlerPath() == null)
+ if(getRestletPath() == null)
{
return this.resourceRef.toString(false, false);
}
else
{
String resourceURI = this.resourceRef.toString(false, false);
- int length = getHandlerPath().length();
+ int length = getRestletPath().length();
if(logger.isLoggable(Level.FINE))
{
logger.fine("Resource URI: " + resourceURI);
- logger.fine("Handler path: " + getHandlerPath());
+ logger.fine("Handler path: " + getRestletPath());
logger.fine("Handler path length: " + length);
}
@@ -581,14 +552,43 @@ public String getResourcePath()
}
/**
- * Returns the resource reference.
- * @return The resource reference.
+ * Returns the absolute resource reference.
+ * @return The absolute resource reference.
*/
public Reference getResourceRef()
{
return this.resourceRef;
}
+ /**
+ * Returns the list of substrings matched in the current Restlet path.
+ * @return The list of substrings matched.
+ * @see <a href="http://java.sun.com/j2se/1.5.0/docs/api/java/util/regex/Matcher.html#group(int)">Matcher.group()</a>
+ */
+ public List<String> getRestletMatches()
+ {
+ if(this.restletMatches == null) this.restletMatches = new ArrayList<String>();
+ return this.restletMatches;
+ }
+
+ /**
+ * Returns the absolute Restlet path, preceeding the relative resource path in the resource reference.
+ * @return The absolute Restlet path.
+ */
+ public String getRestletPath()
+ {
+ return this.restletPath;
+ }
+
+ /**
+ * Returns the Restlet path as a reference.
+ * @return The Restlet path as a reference.
+ */
+ public Reference getRestletRef()
+ {
+ return new ReferenceImpl(getRestletPath());
+ }
+
/**
* Returns the security data related to this call.
* @return The security data related to this call.
@@ -719,7 +719,7 @@ public void setHandlerPath(String handlerPath)
logger.warning("Handler path doesn't match the start of the resource URI: " + handlerPath);
}
- this.handlerPath = handlerPath;
+ this.restletPath = handlerPath;
}
/**
@@ -778,7 +778,7 @@ public void setResourceRef(Reference resourceRef)
// Reset the current handler
setHandlerPath(null);
- getHandlerMatches().clear();
+ getRestletMatches().clear();
}
/**
diff --git a/source/main/com/noelios/restlet/impl/RestletContainerImpl.java b/source/main/com/noelios/restlet/impl/RestletContainerImpl.java
deleted file mode 100644
index 75253911f6..0000000000
--- a/source/main/com/noelios/restlet/impl/RestletContainerImpl.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright 2005-2006 Jérôme LOUVEL
- *
- * The contents of this file are subject to the terms
- * of the Common Development and Distribution License
- * (the "License"). You may not use this file except
- * in compliance with the License.
- *
- * You can obtain a copy of the license at
- * http://www.opensource.org/licenses/cddl1.txt
- * See the License for the specific language governing
- * permissions and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL
- * HEADER in each file and include the License file at
- * http://www.opensource.org/licenses/cddl1.txt
- * If applicable, add the following below this CDDL
- * HEADER, with the fields enclosed by brackets "[]"
- * replaced with your own identifying information:
- * Portions Copyright [yyyy] [name of copyright owner]
- */
-
-package com.noelios.restlet.impl;
-
-import org.restlet.Maplet;
-import org.restlet.RestletCall;
-import org.restlet.Restlet;
-import org.restlet.component.Component;
-import org.restlet.component.RestletContainer;
-
-
-/**
- * Origin server composed of Restlets.<br/>
- * Note that a Restlet containers are Maplets themselves and can be contained in Restlet servers.
- */
-public class RestletContainerImpl extends ComponentImpl implements RestletContainer
-{
- /** The parent container who delegates. */
- protected Component parent;
-
- /** Delegate Maplet handling root Restlets. */
- protected Maplet delegate;
-
- /**
- * Constructor.
- * @param parent The parent component.
- * @param name The unique name of the container.
- */
- public RestletContainerImpl(Component parent, String name)
- {
- super(name);
- this.parent = parent;
- this.delegate = new MapletImpl(parent);
- }
-
- /**
- * Returns the container.
- * @return The container.
- */
- public RestletContainer getContainer()
- {
- return this;
- }
-
- /**
- * Attaches a target instance shared by all calls.
- * @param pathPattern The path pattern used to map calls.
- * @param target The target instance to attach.
- * @see java.util.regex.Pattern
- */
- public void attach(String pathPattern, Restlet target)
- {
- delegate.attach(pathPattern, target);
- }
-
- /**
- * Attaches a target class. A new instance will be created for each call.
- * @param pathPattern The path pattern used to map calls.
- * @param targetClass The target class to attach (can have a constructor taking a RestletContainer
- * parameter).
- * @see java.util.regex.Pattern
- */
- public void attach(String pathPattern, Class<? extends Restlet> targetClass)
- {
- delegate.attach(pathPattern, targetClass);
- }
-
- /**
- * Detaches a target instance.
- * @param target The target instance to detach.
- */
- public void detach(Restlet target)
- {
- delegate.detach(target);
- }
-
- /**
- * Detaches a target class.
- * @param targetClass The Restlet class to detach.
- */
- public void detach(Class<? extends Restlet> targetClass)
- {
- delegate.detach(targetClass);
- }
-
- /**
- * Handles a call to a resource or a set of resources.
- * @param call The call to handle.
- */
- public void handle(RestletCall call)
- {
- delegate.handle(call);
- }
-
- /**
- * Delegates a call to one of the attached targets.<br/>
- * If no delegation is possible, a 404 error status (Client error, Not found) will be returned.
- * @param call The call to delegate.
- * @return True if the call was successfully delegated.
- */
- public boolean delegate(RestletCall call)
- {
- return delegate.delegate(call);
- }
-
- /**
- * Returns the description of this REST element.
- * @return The description of this REST element.
- */
- public String getDescription()
- {
- return "Restlet container";
- }
-
-}
diff --git a/source/main/com/noelios/restlet/impl/RestletServerImpl.java b/source/main/com/noelios/restlet/impl/RestletServerImpl.java
deleted file mode 100644
index 3312b3407c..0000000000
--- a/source/main/com/noelios/restlet/impl/RestletServerImpl.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Copyright 2005-2006 Jérôme LOUVEL
- *
- * The contents of this file are subject to the terms
- * of the Common Development and Distribution License
- * (the "License"). You may not use this file except
- * in compliance with the License.
- *
- * You can obtain a copy of the license at
- * http://www.opensource.org/licenses/cddl1.txt
- * See the License for the specific language governing
- * permissions and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL
- * HEADER in each file and include the License file at
- * http://www.opensource.org/licenses/cddl1.txt
- * If applicable, add the following below this CDDL
- * HEADER, with the fields enclosed by brackets "[]"
- * replaced with your own identifying information:
- * Portions Copyright [yyyy] [name of copyright owner]
- */
-
-package com.noelios.restlet.impl;
-
-import java.util.Iterator;
-import java.util.Map;
-import java.util.TreeMap;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.restlet.RestletCall;
-import org.restlet.component.RestletContainer;
-import org.restlet.component.RestletServer;
-import org.restlet.data.Statuses;
-
-/**
- * Origin server composed of Restlets containers.<br/>
- * Each container is managing its own resource namespace.
- * Incoming calls are normally handled via pluggable server connectors.<br/>
- * Outcoming calls are normally handled via pluggable client connectors.<br/>
- * Other direct calls are handled by the default container.
- */
-public class RestletServerImpl extends ComponentImpl implements RestletServer
-{
- /** Obtain a suitable logger. */
- private static Logger logger = Logger.getLogger("com.noelios.restlet.component.RestletServerImpl");
-
- /**
- * The Restlet containers.
- * @link aggregationByValue
- * @associates <{DefaultContainer}>
- * @supplierCardinality 0..*
- * @clientCardinality 1
- * @label containers
- */
- protected Map<String, RestletContainer> containers;
-
- /** The default container handling direct calls on the server. */
- protected RestletContainer defaultContainer;
-
- /**
- * Constructor.
- * @param name The origin server's name.
- */
- public RestletServerImpl(String name)
- {
- super(name);
- this.containers = new TreeMap<String, RestletContainer>();
- this.defaultContainer = null;
- }
-
- /**
- * Adds a Restlet container.
- * @param name The unique name of the container.
- * @param container The container to add.
- * @return The added container.
- */
- public RestletContainer addContainer(String name, RestletContainer container)
- {
- this.containers.put(name, container);
- return container;
- }
-
- /**
- * Removes a Restlet container.
- * @param name The name of the container to remove.
- */
- public void removeContainer(String name)
- {
- this.containers.remove(name);
- }
-
- /**
- * Sets the default container handling direct calls to the server.
- * @param container The default container.
- */
- public void setDefaultContainer(RestletContainer container)
- {
- this.defaultContainer = container;
- }
-
- /**
- * Returns the default container handling direct calls to the server.
- * @return The default container.
- */
- public RestletContainer getDefaultContainer()
- {
- return this.defaultContainer;
- }
-
- /**
- * Handles a direct call.
- * @param call The call to handle.
- */
- public void handle(RestletCall call)
- {
- if(getDefaultContainer() != null)
- {
- if(getDefaultContainer().isStopped())
- {
- try
- {
- getDefaultContainer().start();
- }
- catch(Exception e)
- {
- call.setStatus(Statuses.SERVER_ERROR_INTERNAL);
- logger.log(Level.SEVERE, "Default Restlet container can't be started", e);
- }
- }
-
- getDefaultContainer().handle(call);
- }
- else
- {
- call.setStatus(Statuses.SERVER_ERROR_INTERNAL);
- logger.log(Level.SEVERE, "No default Restlet container defined");
- }
- }
-
- /**
- * Start hook. Starts all containers.
- */
- public void start() throws Exception
- {
- super.start();
-
- for(Iterator iter = this.containers.keySet().iterator(); iter.hasNext();)
- {
- this.containers.get(iter.next()).start();
- }
- }
-
- /**
- * Stop hook. Stops all containers.
- */
- public void stop() throws Exception
- {
- super.stop();
-
- for(Iterator iter = this.containers.keySet().iterator(); iter.hasNext();)
- {
- this.containers.get(iter.next()).stop();
- }
- }
-
-}
diff --git a/source/main/com/noelios/restlet/impl/RestletTarget.java b/source/main/com/noelios/restlet/impl/RestletTarget.java
index 32bf4c8e3d..c31da81419 100644
--- a/source/main/com/noelios/restlet/impl/RestletTarget.java
+++ b/source/main/com/noelios/restlet/impl/RestletTarget.java
@@ -30,7 +30,6 @@
import org.restlet.RestletCall;
import org.restlet.Restlet;
import org.restlet.component.Component;
-import org.restlet.component.RestletContainer;
import org.restlet.data.Statuses;
/**
@@ -53,8 +52,8 @@ public class RestletTarget
/** The container class to set in the constructor. */
protected Class containerClass;
- /** Indicates if the container can be set in the constructor. */
- protected boolean setContainer;
+ /** Indicates if the parent component can be set in the constructor. */
+ protected boolean setParent;
/**
* Constructor.
@@ -65,7 +64,7 @@ public RestletTarget(Restlet handler)
this.handler = handler;
this.handlerClass = null;
this.handlerConstructor = null;
- this.setContainer = false;
+ this.setParent = false;
}
/**
@@ -76,7 +75,7 @@ public RestletTarget(Class<? extends Restlet> handlerClass)
{
this.handler = null;
this.handlerClass = handlerClass;
- this.setContainer = false;
+ this.setParent = false;
// Try to find a constructor that accepts a RestletContainer parameter
Constructor[] constructors = handlerClass.getConstructors();
@@ -88,10 +87,10 @@ public RestletTarget(Class<? extends Restlet> handlerClass)
if(parameters.length == 1)
{
- if(RestletContainer.class.isAssignableFrom(parameters[0]))
+ if(Component.class.isAssignableFrom(parameters[0]))
{
this.handlerConstructor = constructors[i];
- this.setContainer = true;
+ this.setParent = true;
}
}
}
@@ -125,7 +124,7 @@ public void handle(RestletCall call, Component parent)
{
handler = getHandler();
}
- else if(isSetContainer())
+ else if(isSetParent())
{
handler = (Restlet)getHandlerConstructor().newInstance(parent);
}
@@ -212,12 +211,12 @@ public Class getContainerClass()
}
/**
- * Indicates if the container can be set in the constructor.
- * @return True if the container can be set in the constructor.
+ * Indicates if the parent component can be set in the constructor.
+ * @return True if the parent component can be set in the constructor.
*/
- public boolean isSetContainer()
+ public boolean isSetParent()
{
- return this.setContainer;
+ return this.setParent;
}
}
diff --git a/source/main/org/restlet/AbstractChainlet.java b/source/main/org/restlet/AbstractChainlet.java
index 41cc320acd..7f33bd54a7 100644
--- a/source/main/org/restlet/AbstractChainlet.java
+++ b/source/main/org/restlet/AbstractChainlet.java
@@ -22,7 +22,7 @@
package org.restlet;
-import org.restlet.component.RestletContainer;
+import org.restlet.component.Component;
/**
* Abstract Chainlet that can be easily subclassed.
@@ -35,12 +35,12 @@ public abstract class AbstractChainlet extends AbstractRestlet implements Chainl
/**
* Creates a new Chainlet in the given container.
- * @param container The parent container.
+ * @param parent The parent component.
*/
- public AbstractChainlet(RestletContainer container)
+ public AbstractChainlet(Component parent)
{
- super(container);
- this.delegate = Manager.createChainlet(container);
+ super(parent);
+ this.delegate = Manager.createDelegateChainlet(parent);
}
/**
diff --git a/source/main/org/restlet/AbstractRestlet.java b/source/main/org/restlet/AbstractRestlet.java
index fda7597e10..ef2d7c31c7 100644
--- a/source/main/org/restlet/AbstractRestlet.java
+++ b/source/main/org/restlet/AbstractRestlet.java
@@ -35,7 +35,7 @@ public abstract class AbstractRestlet implements Restlet
/** Indicates if the handler was started. */
protected boolean started;
- /** The container. */
+ /** The parent component. */
protected Component parent;
/**
diff --git a/source/main/org/restlet/DefaultMaplet.java b/source/main/org/restlet/DefaultMaplet.java
index 77a6bd9dd9..725ee82eba 100644
--- a/source/main/org/restlet/DefaultMaplet.java
+++ b/source/main/org/restlet/DefaultMaplet.java
@@ -22,7 +22,7 @@
package org.restlet;
-import org.restlet.component.RestletContainer;
+import org.restlet.component.Component;
/**
* Default Maplet that can be easily subclassed.
@@ -35,12 +35,12 @@ public class DefaultMaplet extends AbstractRestlet implements Maplet
/**
* Creates a new Maplet in the given container.
- * @param container The parent container.
+ * @param parent The parent component.
*/
- public DefaultMaplet(RestletContainer container)
+ public DefaultMaplet(Component parent)
{
- super(container);
- this.delegate = Manager.createMaplet(container);
+ super(parent);
+ this.delegate = Manager.createDelegateMaplet(parent);
}
/**
diff --git a/source/main/org/restlet/Factory.java b/source/main/org/restlet/Factory.java
index 56d5dbc1ce..32df153042 100644
--- a/source/main/org/restlet/Factory.java
+++ b/source/main/org/restlet/Factory.java
@@ -23,8 +23,6 @@
package org.restlet;
import org.restlet.component.Component;
-import org.restlet.component.RestletContainer;
-import org.restlet.component.RestletServer;
import org.restlet.connector.Client;
import org.restlet.connector.Server;
import org.restlet.data.*;
@@ -40,13 +38,6 @@ public interface Factory
*/
public RestletCall createCall();
- /**
- * Creates a delegate Chainlet.
- * @param container The Restlet container.
- * @return A new Chainlet.
- */
- public Chainlet createChainlet(RestletContainer container);
-
/**
* Creates a challenge response for a specific scheme using a user ID and a password as the credentials.<br/>
* @param scheme The challenge scheme to use.
@@ -87,6 +78,22 @@ public interface Factory
*/
public CookieSetting createCookieSetting(String name, String value);
+ /**
+ * Creates a delegate Chainlet for internal usage by the AbstractChainlet.<br/>
+ * If you need a Chainlet for your application, you should be subclassing the AbstractChainlet instead.
+ * @param parent The parent component.
+ * @return A new Chainlet.
+ */
+ public Chainlet createDelegateChainlet(Component parent);
+
+ /**
+ * Creates a delegate Maplet for internal usage by the DefaultMaplet.<br/>
+ * If you need a Maplet for your application, you should be using the DefaultMaplet instead.
+ * @param parent The parent component.
+ * @return A new Maplet.
+ */
+ public Maplet createDelegateMaplet(Component parent);
+
/**
* Creates a new encoding from its standard name.
* @param name The standard encoding name.
@@ -107,13 +114,6 @@ public interface Factory
*/
public Language createLanguage(String name);
- /**
- * Creates a delegate Maplet.
- * @param container The Restlet container.
- * @return A new Maplet.
- */
- public Maplet createRestlet(RestletContainer container);
-
/**
* Creates a new media type from its standard name.
* @param name The standard media type name.
@@ -148,22 +148,6 @@ public interface Factory
* @param mediaType The representation mediatype.
*/
public RepresentationMetadata createRepresentationMetadata(MediaType mediaType);
-
- /**
- * Creates a delegate Restlet container.
- * @param parent The parent component.
- * @param name The container's name.
- * @return The new Restlet container.
- */
- public RestletContainer createRestletContainer(Component parent, String name);
-
- /**
- * Creates a delegate Restlet server.
- * @param parent The parent component.
- * @param name The server's name.
- * @return The new Restlet server.
- */
- public RestletServer createRestletServer(Component parent, String name);
/**
* Create a new server connector for a given protocol.
diff --git a/source/main/org/restlet/Manager.java b/source/main/org/restlet/Manager.java
index 8ebafaa3e6..4fa7654fbe 100644
--- a/source/main/org/restlet/Manager.java
+++ b/source/main/org/restlet/Manager.java
@@ -29,26 +29,9 @@
import java.util.logging.Logger;
import org.restlet.component.Component;
-import org.restlet.component.RestletContainer;
-import org.restlet.component.RestletServer;
import org.restlet.connector.Client;
import org.restlet.connector.Server;
-import org.restlet.data.ChallengeResponse;
-import org.restlet.data.ChallengeScheme;
-import org.restlet.data.CharacterSet;
-import org.restlet.data.Cookie;
-import org.restlet.data.CookieSetting;
-import org.restlet.data.Encoding;
-import org.restlet.data.Form;
-import org.restlet.data.Language;
-import org.restlet.data.MediaType;
-import org.restlet.data.Method;
-import org.restlet.data.Parameter;
-import org.restlet.data.Protocol;
-import org.restlet.data.Reference;
-import org.restlet.data.RepresentationMetadata;
-import org.restlet.data.Status;
-import org.restlet.data.Tag;
+import org.restlet.data.*;
/**
* The main manager that also acts as an object factory. Façade around the current Restlet API implementation.
@@ -73,16 +56,6 @@ public static RestletCall createCall()
return getRegisteredFactory().createCall();
}
- /**
- * Creates a delegate Chainlet.
- * @param container The Restlet container.
- * @return A new Chainlet.
- */
- public static Chainlet createChainlet(RestletContainer container)
- {
- return getRegisteredFactory().createChainlet(container);
- }
-
/**
* Creates a challenge response for a specific scheme (ex: HTTP BASIC authentication)
* using a login and a password as the credentials.
@@ -139,6 +112,28 @@ public static CookieSetting createCookieSetting(String name, String value)
return getRegisteredFactory().createCookieSetting(name, value);
}
+ /**
+ * Creates a delegate Chainlet for internal usage by the AbstractChainlet.<br/>
+ * If you need a Chainlet for your application, you should be subclassing the AbstractChainlet instead.
+ * @param parent The parent component.
+ * @return A new Chainlet.
+ */
+ public static Chainlet createDelegateChainlet(Component parent)
+ {
+ return getRegisteredFactory().createDelegateChainlet(parent);
+ }
+
+ /**
+ * Creates a delegate Maplet for internal usage by the DefaultMaplet.<br/>
+ * If you need a Maplet for your application, you should be using the DefaultMaplet instead.
+ * @param parent The parent component.
+ * @return A new Maplet.
+ */
+ public static Maplet createDelegateMaplet(Component parent)
+ {
+ return getRegisteredFactory().createDelegateMaplet(parent);
+ }
+
/**
* Creates a new encoding from its standard name.
* @param name The standard encoding name.
@@ -168,16 +163,6 @@ public static Language createLanguage(String name)
return getRegisteredFactory().createLanguage(name);
}
- /**
- * Creates a delegate Maplet.
- * @param container The Restlet container.
- * @return A new Maplet.
- */
- public static Maplet createMaplet(RestletContainer container)
- {
- return getRegisteredFactory().createRestlet(container);
- }
-
/**
* Creates a new media type from its standard name.
* @param name The standard media type name.
@@ -228,28 +213,6 @@ public static RepresentationMetadata createRepresentationMetadata(MediaType medi
return getRegisteredFactory().createRepresentationMetadata(mediaType);
}
- /**
- * Creates a delegate Restlet container.
- * @param parent The parent Restlet container.
- * @param name The container's name.
- * @return The new Restlet container.
- */
- public static RestletContainer createRestletContainer(Component parent, String name)
- {
- return getRegisteredFactory().createRestletContainer(parent, name);
- }
-
- /**
- * Creates a delegate Restlet server.
- * @param parent The parent Restlet server.
- * @param name The server's name.
- * @return The new Restlet server.
- */
- public static RestletServer createRestletServer(RestletServer parent, String name)
- {
- return getRegisteredFactory().createRestletServer(parent, name);
- }
-
/**
* Create a new server connector for a given protocol.
* @param protocol The connector protocol.
diff --git a/source/main/org/restlet/RestletCall.java b/source/main/org/restlet/RestletCall.java
index 8eb88a207e..bd8117b190 100644
--- a/source/main/org/restlet/RestletCall.java
+++ b/source/main/org/restlet/RestletCall.java
@@ -91,25 +91,6 @@ public interface RestletCall
*/
public List<CookieSetting> getCookieSettings();
- /**
- * Returns the list of substrings matched in the current handler path.
- * @return The list of substrings matched.
- * @see <a href="http://java.sun.com/j2se/1.5.0/docs/api/java/util/regex/Matcher.html#group(int)">Matcher.group()</a>
- */
- public List<String> getHandlerMatches();
-
- /**
- * Returns the part of the resource reference preceeding the resource path.
- * @return The part of the resource reference preceeding the resource path.
- */
- public String getHandlerPath();
-
- /**
- * Returns the handler path as a reference.
- * @return The handler path as a reference.
- */
- public Reference getHandlerRef();
-
/**
* Returns the representation provided by the client.
* @return The representation provided by the client.
@@ -155,17 +136,36 @@ public interface RestletCall
public Reference getReferrerRef();
/**
- * Returns the part of the resource reference following the handler path.
- * @return The part of the resource reference following the handler path.
+ * Returns the relative resource path, following the absolute Restlet path in the resource reference.
+ * @return The relative resource path.
*/
public String getResourcePath();
/**
- * Returns the resource reference.
- * @return The resource reference.
+ * Returns the absolute resource reference.
+ * @return The absolute resource reference.
*/
public Reference getResourceRef();
+ /**
+ * Returns the list of substrings matched in the current Restlet path.
+ * @return The list of substrings matched.
+ * @see <a href="http://java.sun.com/j2se/1.5.0/docs/api/java/util/regex/Matcher.html#group(int)">Matcher.group()</a>
+ */
+ public List<String> getRestletMatches();
+
+ /**
+ * Returns the absolute Restlet path, preceeding the relative resource path in the resource reference.
+ * @return The absolute Restlet path.
+ */
+ public String getRestletPath();
+
+ /**
+ * Returns the Restlet path as a reference.
+ * @return The Restlet path as a reference.
+ */
+ public Reference getRestletRef();
+
/**
* Returns the security data related to this call.
* @return The security data related to this call.
diff --git a/source/main/org/restlet/RestletCallWrapper.java b/source/main/org/restlet/RestletCallWrapper.java
index 2428052995..73a3042454 100644
--- a/source/main/org/restlet/RestletCallWrapper.java
+++ b/source/main/org/restlet/RestletCallWrapper.java
@@ -127,34 +127,6 @@ public List<CookieSetting> getCookieSettings()
return getWrappedCall().getCookieSettings();
}
- /**
- * Returns the list of substrings matched in the current handler path.
- * @return The list of substrings matched.
- * @see <a href="http://java.sun.com/j2se/1.5.0/docs/api/java/util/regex/Matcher.html#group(int)">Matcher.group()</a>
- */
- public List<String> getHandlerMatches()
- {
- return getWrappedCall().getHandlerMatches();
- }
-
- /**
- * Returns the part of the resource reference preceeding the resource path.
- * @return The part of the resource reference preceeding the resource path.
- */
- public String getHandlerPath()
- {
- return getWrappedCall().getHandlerPath();
- }
-
- /**
- * Returns the handler path as a reference.
- * @return The handler path as a reference.
- */
- public Reference getHandlerRef()
- {
- return getWrappedCall().getHandlerRef();
- }
-
/**
* Returns the representation provided by the client.
* @return The representation provided by the client.
@@ -221,8 +193,8 @@ public Reference getReferrerRef()
}
/**
- * Returns the part of the resource reference following the handler path.
- * @return The part of the resource reference following the handler path.
+ * Returns the relative resource path, following the absolute Restlet path in the resource reference.
+ * @return The relative resource path.
*/
public String getResourcePath()
{
@@ -230,14 +202,42 @@ public String getResourcePath()
}
/**
- * Returns the resource reference.
- * @return The resource reference.
+ * Returns the absolute resource reference.
+ * @return The absolute resource reference.
*/
public Reference getResourceRef()
{
return getWrappedCall().getResourceRef();
}
+ /**
+ * Returns the list of substrings matched in the current Restlet path.
+ * @return The list of substrings matched.
+ * @see <a href="http://java.sun.com/j2se/1.5.0/docs/api/java/util/regex/Matcher.html#group(int)">Matcher.group()</a>
+ */
+ public List<String> getRestletMatches()
+ {
+ return getWrappedCall().getRestletMatches();
+ }
+
+ /**
+ * Returns the absolute Restlet path, preceeding the relative resource path in the resource reference.
+ * @return The absolute Restlet path.
+ */
+ public String getRestletPath()
+ {
+ return getWrappedCall().getRestletPath();
+ }
+
+ /**
+ * Returns the Restlet path as a reference.
+ * @return The Restlet path as a reference.
+ */
+ public Reference getRestletRef()
+ {
+ return getWrappedCall().getRestletRef();
+ }
+
/**
* Returns the security data related to this call.
* @return The security data related to this call.
diff --git a/source/main/com/noelios/restlet/impl/ComponentImpl.java b/source/main/org/restlet/component/AbstractComponent.java
similarity index 81%
rename from source/main/com/noelios/restlet/impl/ComponentImpl.java
rename to source/main/org/restlet/component/AbstractComponent.java
index 759de5e6b5..f531dd3efe 100644
--- a/source/main/com/noelios/restlet/impl/ComponentImpl.java
+++ b/source/main/org/restlet/component/AbstractComponent.java
@@ -20,26 +20,31 @@
* Portions Copyright [yyyy] [name of copyright owner]
*/
-package com.noelios.restlet.impl;
+package org.restlet.component;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.Iterator;
+import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.restlet.AbstractRestlet;
import org.restlet.RestletCall;
import org.restlet.Restlet;
-import org.restlet.component.Component;
import org.restlet.connector.Client;
import org.restlet.connector.Server;
+import org.restlet.data.Parameter;
/**
- * Abstract origin server implementation.
+ * Abstract component implementation.
*/
-public abstract class ComponentImpl extends AbstractRestlet implements Component
+public abstract class AbstractComponent extends AbstractRestlet implements Component
{
- /** The component name. */
+ /** The initialization parameters. */
+ protected List<Parameter> initParameters;
+
+ /** The component name. */
protected String name;
/** The map of client connectors. */
@@ -52,13 +57,24 @@ public abstract class ComponentImpl extends AbstractRestlet implements Component
* Constructor.
* @param name The component name.
*/
- public ComponentImpl(String name)
+ public AbstractComponent(String name)
{
+ this.initParameters = null;
this.name = name;
this.clients = new TreeMap<String, Client>();
this.servers = new TreeMap<String, Server>();
}
+ /**
+ * Returns a modifiable list of initialization parameters
+ * @return A modifiable list of initialization parameters
+ */
+ public List<Parameter> getInitParameters()
+ {
+ if(this.initParameters == null) this.initParameters = new ArrayList<Parameter>();
+ return this.initParameters;
+ }
+
/**
* Returns the name of this REST element.
* @return The name of this REST element.
@@ -133,7 +149,7 @@ public void callClient(String name, RestletCall call) throws IOException
*/
public String getDescription()
{
- return "Abstract origin server";
+ return "Abstract component";
}
/**
diff --git a/source/main/org/restlet/component/Component.java b/source/main/org/restlet/component/Component.java
index e0e8a27d5f..f2692a36d6 100644
--- a/source/main/org/restlet/component/Component.java
+++ b/source/main/org/restlet/component/Component.java
@@ -23,12 +23,14 @@
package org.restlet.component;
import java.io.IOException;
+import java.util.List;
import org.restlet.Element;
import org.restlet.RestletCall;
import org.restlet.Restlet;
import org.restlet.connector.Client;
import org.restlet.connector.Server;
+import org.restlet.data.Parameter;
/**
* Abstract unit of software instructions and internal state.<br/><br/>
@@ -39,7 +41,13 @@
*/
public interface Component extends Element, Restlet
{
- /**
+ /**
+ * Returns a modifiable list of initialization parameters
+ * @return A modifiable list of initialization parameters
+ */
+ public List<Parameter> getInitParameters();
+
+ /**
* Adds a server connector to this component.
* @param server The server connector to add.
* @return The server connector added.
diff --git a/source/main/org/restlet/component/DefaultRestletContainer.java b/source/main/org/restlet/component/DefaultRestletContainer.java
index b2b824953b..531fc92341 100644
--- a/source/main/org/restlet/component/DefaultRestletContainer.java
+++ b/source/main/org/restlet/component/DefaultRestletContainer.java
@@ -22,24 +22,26 @@
package org.restlet.component;
-import java.io.IOException;
-
-import org.restlet.AbstractRestlet;
-import org.restlet.Manager;
+import org.restlet.Maplet;
import org.restlet.Restlet;
import org.restlet.RestletCall;
-import org.restlet.connector.Client;
-import org.restlet.connector.Server;
+
+import com.noelios.restlet.impl.MapletImpl;
/**
- * Default Restlet container that can be easily subclassed.<br/> <br/> Component acting as a container for
- * Restlets, Chainlets and Maplets. Calls are first intercepted by the container which can do various checks
- * before effectively delegating it to one of the registered root Restlets.
+ * Container for Maplets, Chainlets or Restlets.<br/>
+ * Note that a container is a Maplet itself and can be part of a larger RestletServer.<br/>
+ * Calls are first intercepted by the container which can do various checks before effectively delegating it to one of the registered root Restlets.
+ * Restlet containers can also be contained within a Restlet server.
+ * @see <a href="http://www.restlet.org/tutorial#part05">Tutorial: Restlets servers and containers</a>
*/
-public class DefaultRestletContainer extends AbstractRestlet implements RestletContainer
+public class DefaultRestletContainer extends AbstractComponent implements Maplet
{
- /** The delegate Restlet container. */
- protected RestletContainer delegate;
+ /** The parent container who delegates. */
+ protected Component parent;
+
+ /** Delegate Maplet handling root Restlets. */
+ protected Maplet delegate;
/**
* Constructor.
@@ -47,65 +49,19 @@ public class DefaultRestletContainer extends AbstractRestlet implements RestletC
*/
public DefaultRestletContainer(String name)
{
- this(null, name);
+ this(null, name);
}
/**
* Constructor.
- * @param server The parent Restlet server.
+ * @param parent The parent component.
* @param name The unique name of the container.
*/
- public DefaultRestletContainer(RestletServer server, String name)
- {
- this.delegate = Manager.createRestletContainer(server, name);
- }
-
- /**
- * Adds a server connector to this component.
- * @param server The server connector to add.
- * @return The server connector added.
- */
- public Server addServer(Server server)
- {
- return delegate.addServer(server);
- }
-
- /**
- * Removes a server connector from this component.
- * @param name The name of the server connector to remove.
- */
- public void removeServer(String name)
- {
- delegate.removeServer(name);
- }
-
- /**
- * Adds a client connector to this component.
- * @param client The client connector to add.
- * @return The client connector added.
- */
- public Client addClient(Client client)
- {
- return delegate.addClient(client);
- }
-
- /**
- * Removes a client connector from this component.
- * @param name The name of the client connector to remove.
- */
- public void removeClient(String name)
- {
- delegate.removeClient(name);
- }
-
- /**
- * Calls a client connector.
- * @param name The name of the client connector.
- * @param call The call to handle.
- */
- public void callClient(String name, RestletCall call) throws IOException
+ public DefaultRestletContainer(Component parent, String name)
{
- delegate.callClient(name, call);
+ super(name);
+ this.parent = parent;
+ this.delegate = new MapletImpl(parent);
}
/**
@@ -142,7 +98,7 @@ public void detach(Restlet target)
/**
* Detaches a target class.
- * @param targetClass The target class to detach.
+ * @param targetClass The Restlet class to detach.
*/
public void detach(Class<? extends Restlet> targetClass)
{
@@ -150,62 +106,23 @@ public void detach(Class<? extends Restlet> targetClass)
}
/**
- * Delegates a call to one of the attached targets.<br/>
- * If no delegation is possible, a 404 error status (Client error, Not found) will be returned.
- * @param call The call to delegate.
- * @return True if the call was successfully delegated.
- */
- public boolean delegate(RestletCall call)
- {
- return delegate.delegate(call);
- }
-
- /**
- * Handles a uniform call.
- * @param call The uniform call to handle.
+ * Handles a call to a resource or a set of resources.
+ * @param call The call to handle.
*/
public void handle(RestletCall call)
{
delegate.handle(call);
}
- /** Starts the handler. */
- public void start() throws Exception
- {
- delegate.start();
- }
-
- /** Stops the handler. */
- public void stop() throws Exception
- {
- delegate.stop();
- }
-
/**
- * Indicates if the handler is started.
- * @return True if the handler is started.
- */
- public boolean isStarted()
- {
- return delegate.isStarted();
- }
-
- /**
- * Indicates if the handler is stopped.
- * @return True if the handler is stopped.
- */
- public boolean isStopped()
- {
- return delegate.isStopped();
- }
-
- /**
- * Returns the name of this REST component.
- * @return The name of this REST component.
+ * Delegates a call to one of the attached targets.<br/>
+ * If no delegation is possible, a 404 error status (Client error, Not found) will be returned.
+ * @param call The call to delegate.
+ * @return True if the call was successfully delegated.
*/
- public String getName()
+ public boolean delegate(RestletCall call)
{
- return delegate.getName();
+ return delegate.delegate(call);
}
/**
@@ -214,7 +131,7 @@ public String getName()
*/
public String getDescription()
{
- return delegate.getDescription();
+ return "Restlet container";
}
}
diff --git a/source/main/org/restlet/component/DefaultRestletServer.java b/source/main/org/restlet/component/DefaultRestletServer.java
index 952d0f01e8..9630e1454d 100644
--- a/source/main/org/restlet/component/DefaultRestletServer.java
+++ b/source/main/org/restlet/component/DefaultRestletServer.java
@@ -22,21 +22,40 @@
package org.restlet.component;
-import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.logging.Level;
+import java.util.logging.Logger;
-import org.restlet.AbstractRestlet;
-import org.restlet.Manager;
import org.restlet.RestletCall;
-import org.restlet.connector.Client;
-import org.restlet.connector.Server;
+import org.restlet.data.Statuses;
/**
- * Default Restlet server that can be easily subclassed.
+ * Component composed of multiple Restlet containers.<br/>
+ * Each container is managing its own resource namespace.
+ * Incoming calls are normally handled via pluggable server connectors.<br/>
+ * Outcoming calls are normally handled via pluggable client connectors.<br/>
+ * Other direct calls are handled by the default container.
+ * @see <a href="http://www.restlet.org/tutorial#part05">Tutorial: Restlets servers and containers</a>
*/
-public class DefaultRestletServer extends AbstractRestlet implements RestletServer
+public class DefaultRestletServer extends AbstractComponent
{
- /** The deletate Restlet server. */
- protected RestletServer delegate;
+ /** Obtain a suitable logger. */
+ private static Logger logger = Logger.getLogger("com.noelios.restlet.component.RestletServerImpl");
+
+ /**
+ * The Restlet containers.
+ * @link aggregationByValue
+ * @associates <{DefaultContainer}>
+ * @supplierCardinality 0..*
+ * @clientCardinality 1
+ * @label containers
+ */
+ protected Map<String, DefaultRestletContainer> containers;
+
+ /** The default container handling direct calls on the server. */
+ protected DefaultRestletContainer defaultContainer;
/**
* Constructor.
@@ -44,18 +63,19 @@ public class DefaultRestletServer extends AbstractRestlet implements RestletServ
*/
public DefaultRestletServer(String name)
{
- this.delegate = Manager.createRestletServer(this, name);
+ super(name);
+ this.containers = new TreeMap<String, DefaultRestletContainer>();
+ this.defaultContainer = null;
}
/**
* Adds a Restlet container.
* @param name The unique name of the container.
* @param container The container to add.
- * @return The added container.
*/
- public RestletContainer addContainer(String name, RestletContainer container)
+ public void addContainer(String name, DefaultRestletContainer container)
{
- return delegate.addContainer(name, container);
+ this.containers.put(name, container);
}
/**
@@ -64,130 +84,81 @@ public RestletContainer addContainer(String name, RestletContainer container)
*/
public void removeContainer(String name)
{
- delegate.removeContainer(name);
- }
-
- /**
- * Returns the default container handling direct calls to the server.
- * @return The default container.
- */
- public RestletContainer getDefaultContainer()
- {
- return delegate.getDefaultContainer();
+ this.containers.remove(name);
}
/**
* Sets the default container handling direct calls to the server.
* @param container The default container.
*/
- public void setDefaultContainer(RestletContainer container)
- {
- delegate.setDefaultContainer(container);
- }
-
- /**
- * Adds a server connector to this component.
- * @param server The server connector to add.
- * @return The server connector added.
- */
- public Server addServer(Server server)
- {
- return delegate.addServer(server);
- }
-
- /**
- * Removes a server connector from this component.
- * @param name The name of the server connector to remove.
- */
- public void removeServer(String name)
- {
- delegate.removeServer(name);
- }
-
- /**
- * Adds a client connector to this component.
- * @param client The client connector to add.
- * @return The client connector added.
- */
- public Client addClient(Client client)
+ public void setDefaultTarget(DefaultRestletContainer container)
{
- return delegate.addClient(client);
+ this.defaultContainer = container;
}
/**
- * Removes a client connector from this component.
- * @param name The name of the client connector to remove.
+ * Returns the default container handling direct calls to the server.
+ * @return The default container.
*/
- public void removeClient(String name)
+ public DefaultRestletContainer getDefaultTarget()
{
- delegate.removeClient(name);
+ return this.defaultContainer;
}
/**
- * Calls a client connector.
- * @param name The name of the client connector.
+ * Handles a direct call.
* @param call The call to handle.
*/
- public void callClient(String name, RestletCall call) throws IOException
- {
- delegate.callClient(name, call);
- }
-
- /**
- * Handles a uniform call.
- * @param call The uniform call to handle.
- */
public void handle(RestletCall call)
{
- delegate.handle(call);
- }
-
- /** Starts the handler. */
- public void start() throws Exception
- {
- delegate.start();
- }
-
- /** Stops the handler. */
- public void stop() throws Exception
- {
- delegate.stop();
+ if(getDefaultTarget() != null)
+ {
+ if(getDefaultTarget().isStopped())
+ {
+ try
+ {
+ getDefaultTarget().start();
+ }
+ catch(Exception e)
+ {
+ call.setStatus(Statuses.SERVER_ERROR_INTERNAL);
+ logger.log(Level.SEVERE, "Default Restlet container can't be started", e);
+ }
+ }
+
+ getDefaultTarget().handle(call);
+ }
+ else
+ {
+ call.setStatus(Statuses.SERVER_ERROR_INTERNAL);
+ logger.log(Level.SEVERE, "No default Restlet container defined");
+ }
}
/**
- * Indicates if the handler is started.
- * @return True if the handler is started.
+ * Start hook. Starts all containers.
*/
- public boolean isStarted()
+ public void start() throws Exception
{
- return delegate.isStarted();
- }
+ super.start();
- /**
- * Indicates if the handler is stopped.
- * @return True if the handler is stopped.
- */
- public boolean isStopped()
- {
- return delegate.isStopped();
+ for(Iterator iter = this.containers.keySet().iterator(); iter.hasNext();)
+ {
+ this.containers.get(iter.next()).start();
+ }
}
/**
- * Returns the name of this REST component.
- * @return The name of this REST component.
+ * Stop hook. Stops all containers.
*/
- public String getName()
+ public void stop() throws Exception
{
- return delegate.getName();
- }
+ super.stop();
- /**
- * Returns the description of this REST element.
- * @return The description of this REST element.
- */
- public String getDescription()
- {
- return delegate.getDescription();
+ for(Iterator iter = this.containers.keySet().iterator(); iter.hasNext();)
+ {
+ this.containers.get(iter.next()).stop();
+ }
}
}
diff --git a/source/main/org/restlet/component/RestletContainer.java b/source/main/org/restlet/component/RestletContainer.java
deleted file mode 100644
index f344895df3..0000000000
--- a/source/main/org/restlet/component/RestletContainer.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2005-2006 Jérôme LOUVEL
- *
- * The contents of this file are subject to the terms
- * of the Common Development and Distribution License
- * (the "License"). You may not use this file except
- * in compliance with the License.
- *
- * You can obtain a copy of the license at
- * http://www.opensource.org/licenses/cddl1.txt
- * See the License for the specific language governing
- * permissions and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL
- * HEADER in each file and include the License file at
- * http://www.opensource.org/licenses/cddl1.txt
- * If applicable, add the following below this CDDL
- * HEADER, with the fields enclosed by brackets "[]"
- * replaced with your own identifying information:
- * Portions Copyright [yyyy] [name of copyright owner]
- */
-
-package org.restlet.component;
-
-import org.restlet.Maplet;
-
-/**
- * Origin server composed of Restlets.<br/>
- * Note that a container is a Maplet itself.<br/>
- * Restlet containers can also be contained within a Restlet server.
- * @see <a href="http://www.restlet.org/tutorial#part05">Tutorial: Restlets servers and containers</a>
- */
-public interface RestletContainer extends OriginServer, Maplet
-{
-}
diff --git a/source/main/org/restlet/component/RestletServer.java b/source/main/org/restlet/component/RestletServer.java
deleted file mode 100644
index 879d325a1e..0000000000
--- a/source/main/org/restlet/component/RestletServer.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright 2005-2006 Jérôme LOUVEL
- *
- * The contents of this file are subject to the terms
- * of the Common Development and Distribution License
- * (the "License"). You may not use this file except
- * in compliance with the License.
- *
- * You can obtain a copy of the license at
- * http://www.opensource.org/licenses/cddl1.txt
- * See the License for the specific language governing
- * permissions and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL
- * HEADER in each file and include the License file at
- * http://www.opensource.org/licenses/cddl1.txt
- * If applicable, add the following below this CDDL
- * HEADER, with the fields enclosed by brackets "[]"
- * replaced with your own identifying information:
- * Portions Copyright [yyyy] [name of copyright owner]
- */
-
-package org.restlet.component;
-
-/**
- * Origin server composed of Restlets containers.<br/>
- * Each container is managing its own resource namespace.
- * @see <a href="http://www.restlet.org/tutorial#part05">Tutorial: Restlets servers and containers</a>
- */
-public interface RestletServer extends OriginServer
-{
- /**
- * Adds a Restlet container.
- * @param name The unique name of the container.
- * @param container The container to add.
- * @return The added container.
- */
- public RestletContainer addContainer(String name, RestletContainer container);
-
- /**
- * Removes a Restlet container.
- * @param name The name of the container to remove.
- */
- public void removeContainer(String name);
-
- /**
- * Returns the default container handling direct calls to the server.
- * @return The default container.
- */
- public RestletContainer getDefaultContainer();
-
- /**
- * Sets the default container handling direct calls to the server.
- * @param container The default container.
- */
- public void setDefaultContainer(RestletContainer container);
-
-}
diff --git a/source/test/com/noelios/restlet/test/RedirectTest.java b/source/test/com/noelios/restlet/test/RedirectTest.java
index 38aab2233c..5630c187f0 100644
--- a/source/test/com/noelios/restlet/test/RedirectTest.java
+++ b/source/test/com/noelios/restlet/test/RedirectTest.java
@@ -31,7 +31,6 @@
import org.restlet.RestletCall;
import org.restlet.Restlet;
import org.restlet.component.DefaultRestletContainer;
-import org.restlet.component.RestletContainer;
import org.restlet.data.MediaTypes;
import org.restlet.data.Method;
import org.restlet.data.Methods;
@@ -53,7 +52,7 @@ public void testRedirect() throws IOException
try
{
// Create a new Restlet container
- RestletContainer myContainer = new DefaultRestletContainer("My container");
+ DefaultRestletContainer myContainer = new DefaultRestletContainer("My container");
// Create the client connectors
myContainer.addClient(Manager.createClient(Protocols.HTTP, "Test client"));
@@ -71,7 +70,7 @@ public void handle(RestletCall call)
{
// Print the requested URI path
String output = "Resource URI: " + call.getResourceRef() + '\n' +
- "Handler path: " + call.getHandlerPath() + '\n' +
+ "Handler path: " + call.getRestletPath() + '\n' +
"Resource path: " + call.getResourcePath() + '\n' +
"Query string: " + call.getResourceRef().getQuery() + '\n' +
"Method name: " + call.getMethod() + '\n';
@@ -111,7 +110,7 @@ public void handle(RestletCall call)
}
}
- private void testCall(RestletContainer myContainer, Method method, String uri)
+ private void testCall(DefaultRestletContainer myContainer, Method method, String uri)
{
try
{
diff --git a/source/tutorial/com/noelios/restlet/tutorial/Tutorial05.java b/source/tutorial/com/noelios/restlet/tutorial/Tutorial05.java
index 8f82fcd885..fd047ec4e7 100644
--- a/source/tutorial/com/noelios/restlet/tutorial/Tutorial05.java
+++ b/source/tutorial/com/noelios/restlet/tutorial/Tutorial05.java
@@ -27,7 +27,6 @@
import org.restlet.RestletCall;
import org.restlet.Restlet;
import org.restlet.component.DefaultRestletContainer;
-import org.restlet.component.RestletContainer;
import org.restlet.connector.Server;
import org.restlet.data.MediaTypes;
import org.restlet.data.Protocols;
@@ -44,7 +43,7 @@ public static void main(String[] args)
try
{
// Create a new Restlet container
- RestletContainer myContainer = new DefaultRestletContainer("My container");
+ DefaultRestletContainer myContainer = new DefaultRestletContainer("My container");
// Create the HTTP server connector, then add it as a server connector
// to the Restlet container. Note that the container is the call handler.
@@ -58,7 +57,7 @@ public void handle(RestletCall call)
{
// Print the requested URI path
String output = "Resource URI: " + call.getResourceRef() + '\n' +
- "Handler path: " + call.getHandlerPath() + '\n' +
+ "Handler path: " + call.getRestletPath() + '\n' +
"Resource path: " + call.getResourcePath() + '\n' +
"Query string: " + call.getResourceRef().getQuery();
call.setOutput(new StringRepresentation(output, MediaTypes.TEXT_PLAIN));
diff --git a/source/tutorial/com/noelios/restlet/tutorial/Tutorial06.java b/source/tutorial/com/noelios/restlet/tutorial/Tutorial06.java
index 335a6dbeb6..d2151d4de2 100644
--- a/source/tutorial/com/noelios/restlet/tutorial/Tutorial06.java
+++ b/source/tutorial/com/noelios/restlet/tutorial/Tutorial06.java
@@ -24,7 +24,6 @@
import org.restlet.Manager;
import org.restlet.component.DefaultRestletContainer;
-import org.restlet.component.RestletContainer;
import org.restlet.connector.Server;
import org.restlet.data.MediaTypes;
import org.restlet.data.Protocols;
@@ -41,7 +40,7 @@ public static void main(String[] args)
try
{
// Create a new Restlet container
- RestletContainer myContainer = new DefaultRestletContainer("My container");
+ DefaultRestletContainer myContainer = new DefaultRestletContainer("My container");
// Create the HTTP server connector, then add it as a server connector
// to the Restlet container. Note that the container is the call handler.
diff --git a/source/tutorial/com/noelios/restlet/tutorial/Tutorial07.java b/source/tutorial/com/noelios/restlet/tutorial/Tutorial07.java
index cb4fc7dc13..781cb241e6 100644
--- a/source/tutorial/com/noelios/restlet/tutorial/Tutorial07.java
+++ b/source/tutorial/com/noelios/restlet/tutorial/Tutorial07.java
@@ -24,7 +24,6 @@
import org.restlet.Manager;
import org.restlet.component.DefaultRestletContainer;
-import org.restlet.component.RestletContainer;
import org.restlet.connector.Server;
import org.restlet.data.MediaTypes;
import org.restlet.data.Protocols;
@@ -42,7 +41,7 @@ public static void main(String[] args)
try
{
// Create a new Restlet container
- RestletContainer myContainer = new DefaultRestletContainer("My container");
+ DefaultRestletContainer myContainer = new DefaultRestletContainer("My container");
// Create the HTTP server connector, then add it as a server connector
// to the Restlet container. Note that the container is the call handler.
diff --git a/source/tutorial/com/noelios/restlet/tutorial/Tutorial08.java b/source/tutorial/com/noelios/restlet/tutorial/Tutorial08.java
index c30e70c354..d325f40a3d 100644
--- a/source/tutorial/com/noelios/restlet/tutorial/Tutorial08.java
+++ b/source/tutorial/com/noelios/restlet/tutorial/Tutorial08.java
@@ -24,7 +24,6 @@
import org.restlet.Manager;
import org.restlet.component.DefaultRestletContainer;
-import org.restlet.component.RestletContainer;
import org.restlet.connector.Server;
import org.restlet.data.MediaTypes;
import org.restlet.data.Protocols;
@@ -43,7 +42,7 @@ public static void main(String[] args)
try
{
// Create a new Restlet container
- RestletContainer myContainer = new DefaultRestletContainer("My container");
+ DefaultRestletContainer myContainer = new DefaultRestletContainer("My container");
// Create the HTTP server connector, then add it as a server connector
// to the Restlet container. Note that the container is the call handler.
diff --git a/source/tutorial/com/noelios/restlet/tutorial/Tutorial09a.java b/source/tutorial/com/noelios/restlet/tutorial/Tutorial09a.java
index 91fb989b57..c0cd08994b 100644
--- a/source/tutorial/com/noelios/restlet/tutorial/Tutorial09a.java
+++ b/source/tutorial/com/noelios/restlet/tutorial/Tutorial09a.java
@@ -25,7 +25,6 @@
import org.restlet.Manager;
import org.restlet.RestletCall;
import org.restlet.component.DefaultRestletContainer;
-import org.restlet.component.RestletContainer;
import org.restlet.connector.Server;
import org.restlet.data.ChallengeSchemes;
import org.restlet.data.MediaTypes;
@@ -46,7 +45,7 @@ public static void main(String[] args)
try
{
// Create a new Restlet container
- RestletContainer myContainer = new DefaultRestletContainer("My container");
+ DefaultRestletContainer myContainer = new DefaultRestletContainer("My container");
// Create the HTTP server connector, then add it as a server connector
// to the Restlet container. Note that the container is the call handler.
diff --git a/source/tutorial/com/noelios/restlet/tutorial/Tutorial10.java b/source/tutorial/com/noelios/restlet/tutorial/Tutorial10.java
index e02c7ea44d..383639191f 100644
--- a/source/tutorial/com/noelios/restlet/tutorial/Tutorial10.java
+++ b/source/tutorial/com/noelios/restlet/tutorial/Tutorial10.java
@@ -24,7 +24,6 @@
import org.restlet.Manager;
import org.restlet.component.DefaultRestletContainer;
-import org.restlet.component.RestletContainer;
import org.restlet.connector.Server;
import org.restlet.data.Protocols;
@@ -40,7 +39,7 @@ public static void main(String[] args)
try
{
// Create a new Restlet container
- RestletContainer myContainer = new DefaultRestletContainer("My container");
+ DefaultRestletContainer myContainer = new DefaultRestletContainer("My container");
// Create the HTTP server connector, then add it as a server connector
// to the Restlet container. Note that the container is the call handler.
diff --git a/source/tutorial/com/noelios/restlet/tutorial/Tutorial11.java b/source/tutorial/com/noelios/restlet/tutorial/Tutorial11.java
index 5c4161a2be..ad6c8cbab9 100644
--- a/source/tutorial/com/noelios/restlet/tutorial/Tutorial11.java
+++ b/source/tutorial/com/noelios/restlet/tutorial/Tutorial11.java
@@ -31,7 +31,6 @@
import org.restlet.RestletCall;
import org.restlet.Restlet;
import org.restlet.component.DefaultRestletContainer;
-import org.restlet.component.RestletContainer;
import org.restlet.connector.Server;
import org.restlet.data.ChallengeSchemes;
import org.restlet.data.MediaTypes;
@@ -53,7 +52,7 @@ public static void main(String[] args)
try
{
// Create a new Restlet container
- RestletContainer myContainer = new DefaultRestletContainer("My container");
+ DefaultRestletContainer myContainer = new DefaultRestletContainer("My container");
// Create the HTTP server connector, then add it as a server connector
// to the Restlet container. Note that the container is the call handler.
@@ -103,7 +102,7 @@ public void handle(RestletCall call)
if(call.getResourcePath().equals(""))
{
// Print the requested URI path
- String output = "Account of user named: " + call.getHandlerRef().getLastSegment();
+ String output = "Account of user named: " + call.getRestletRef().getLastSegment();
call.setOutput(new StringRepresentation(output, MediaTypes.TEXT_PLAIN));
}
else
@@ -121,7 +120,7 @@ public void handle(RestletCall call)
public void handle(RestletCall call)
{
// Print the user name of the requested orders
- List<String> segments = call.getHandlerRef().getSegments();
+ List<String> segments = call.getRestletRef().getSegments();
String output = "Orders of user named: " + segments.get(segments.size() - 2);
call.setOutput(new StringRepresentation(output, MediaTypes.TEXT_PLAIN));
}
|
581b19f0fef9fbd7f4a5e2d6569b8eca09bf28e1
|
ReactiveX-RxJava
|
Fix multiple subscription bug on operation filter

A new subscription must be created on every subscribe call, otherwise any subscribe call after the first directly fails.
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/operators/OperationFilter.java b/rxjava-core/src/main/java/rx/operators/OperationFilter.java
index 3d6c0ffb79..64cd70d1ff 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationFilter.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationFilter.java
@@ -37,7 +37,6 @@ private static class Filter<T> implements Func1<Observer<T>, Subscription> {
private final Observable<T> that;
private final Func1<T, Boolean> predicate;
- private final AtomicObservableSubscription subscription = new AtomicObservableSubscription();
public Filter(Observable<T> that, Func1<T, Boolean> predicate) {
this.that = that;
@@ -45,6 +44,7 @@ public Filter(Observable<T> that, Func1<T, Boolean> predicate) {
}
public Subscription call(final Observer<T> observer) {
+ final AtomicObservableSubscription subscription = new AtomicObservableSubscription();
return subscription.wrap(that.subscribe(new Observer<T>() {
public void onNext(T value) {
try {
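A minimal standalone sketch of the pattern the fix above relies on: a set-once subscription wrapper has to be created inside call() for every subscribe, otherwise the second subscriber trips over state left behind by the first. The WrapOnce class below is a hypothetical stand-in for the set-once behaviour assumed of AtomicObservableSubscription; it does not use the actual RxJava types.

import java.util.concurrent.atomic.AtomicReference;

public class PerCallSubscriptionSketch {

    // Hypothetical stand-in for a subscription wrapper that may only be set once.
    static final class WrapOnce {
        private final AtomicReference<Runnable> unsubscribe = new AtomicReference<>();

        Runnable wrap(Runnable actual) {
            if (!unsubscribe.compareAndSet(null, actual)) {
                throw new IllegalStateException("wrapper already used by an earlier subscribe");
            }
            return actual;
        }
    }

    // Buggy shape: one wrapper shared across all subscribe calls, like the removed field.
    static final WrapOnce shared = new WrapOnce();

    static Runnable subscribeShared() {
        return shared.wrap(() -> { /* unsubscribe logic would go here */ });
    }

    // Fixed shape: a fresh wrapper per call, like the local variable introduced in the diff.
    static Runnable subscribePerCall() {
        WrapOnce perCall = new WrapOnce();
        return perCall.wrap(() -> { /* unsubscribe logic would go here */ });
    }

    public static void main(String[] args) {
        subscribePerCall();
        subscribePerCall();      // fine: independent state for every subscription
        subscribeShared();
        try {
            subscribeShared();   // fails: the shared wrapper was already consumed
        } catch (IllegalStateException expected) {
            System.out.println("second shared subscribe failed, as described in the commit message");
        }
    }
}

Moving the wrapper from a field to a local variable is the whole fix: each call() invocation now owns its own subscription state, so repeated subscribe calls stay independent.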
|
45529d30d41b8de338dfc2d26b61d1a79113c766
|
drools
|
JBRULES-2934 Add NewRegisterWorkItemHandlerCommand to CommandFactory with unit test
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/command/RegisterWorkItemHandlerTest.java b/drools-compiler/src/test/java/org/drools/command/RegisterWorkItemHandlerTest.java
new file mode 100644
index 00000000000..ec51784363d
--- /dev/null
+++ b/drools-compiler/src/test/java/org/drools/command/RegisterWorkItemHandlerTest.java
@@ -0,0 +1,64 @@
+package org.drools.command;
+
+import static org.junit.Assert.*;
+
+import org.drools.KnowledgeBase;
+import org.drools.KnowledgeBaseFactory;
+import org.drools.builder.KnowledgeBuilder;
+import org.drools.builder.KnowledgeBuilderFactory;
+import org.drools.builder.ResourceType;
+import org.drools.io.ResourceFactory;
+import org.drools.process.instance.WorkItem;
+import org.drools.process.instance.impl.DefaultWorkItemManager;
+import org.drools.process.instance.impl.WorkItemImpl;
+import org.drools.runtime.StatelessKnowledgeSession;
+import org.drools.runtime.process.WorkItemHandler;
+import org.drools.runtime.process.WorkItemManager;
+import org.junit.Test;
+
+public class RegisterWorkItemHandlerTest {
+
+ @Test
+ public void testRegisterWorkItemHandlerWithStatelessSession() {
+ String str =
+ "package org.drools.workitem.test \n" +
+ "import " + DefaultWorkItemManager.class.getCanonicalName() + "\n" +
+ "import " + WorkItem.class.getCanonicalName() + "\n" +
+ "import " + WorkItemImpl.class.getCanonicalName() + "\n" +
+ "rule r1 when \n" +
+ "then \n" +
+ " WorkItem wi = new WorkItemImpl(); \n" +
+ " wi.setName( \"wihandler\" ); \n" +
+ " DefaultWorkItemManager wim = ( DefaultWorkItemManager ) kcontext.getKnowledgeRuntime().getWorkItemManager(); \n" +
+ " wim.internalExecuteWorkItem(wi); \n" +
+ "end \n";
+
+ KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
+ kbuilder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL );
+
+ if ( kbuilder.hasErrors() ) {
+ fail( kbuilder.getErrors().toString() );
+ }
+
+ KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
+ kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
+
+ final boolean[] answer = new boolean[] { false };
+ StatelessKnowledgeSession ks = kbase.newStatelessKnowledgeSession();
+ ks.execute( CommandFactory.newRegisterWorkItemHandlerCommand( new WorkItemHandler() {
+
+ public void executeWorkItem(org.drools.runtime.process.WorkItem workItem,
+ WorkItemManager manager) {
+ answer[0] = true;
+ }
+
+ public void abortWorkItem(org.drools.runtime.process.WorkItem workItem,
+ WorkItemManager manager) {
+ // TODO Auto-generated method stub
+
+ }
+ }, "wihandler" ) );
+
+ assertTrue( answer[0] );
+ }
+}
diff --git a/drools-core/src/main/java/org/drools/command/impl/CommandFactoryServiceImpl.java b/drools-core/src/main/java/org/drools/command/impl/CommandFactoryServiceImpl.java
index a3ab9b350db..d21b72a90bc 100644
--- a/drools-core/src/main/java/org/drools/command/impl/CommandFactoryServiceImpl.java
+++ b/drools-core/src/main/java/org/drools/command/impl/CommandFactoryServiceImpl.java
@@ -32,6 +32,7 @@
import org.drools.command.runtime.KBuilderSetPropertyCommand;
import org.drools.command.runtime.process.AbortWorkItemCommand;
import org.drools.command.runtime.process.CompleteWorkItemCommand;
+import org.drools.command.runtime.process.RegisterWorkItemHandlerCommand;
import org.drools.command.runtime.process.SignalEventCommand;
import org.drools.command.runtime.process.StartProcessCommand;
import org.drools.command.runtime.rule.FireAllRulesCommand;
@@ -45,6 +46,7 @@
import org.drools.command.runtime.rule.RetractCommand;
import org.drools.command.runtime.rule.ModifyCommand.SetterImpl;
import org.drools.runtime.ObjectFilter;
+import org.drools.runtime.process.WorkItemHandler;
import org.drools.runtime.rule.FactHandle;
public class CommandFactoryServiceImpl implements CommandFactoryService {
@@ -186,6 +188,11 @@ public Command newAbortWorkItem(long workItemId) {
return new AbortWorkItemCommand( workItemId);
}
+ public Command newRegisterWorkItemHandlerCommand(WorkItemHandler handler,
+ String workItemName) {
+ return new RegisterWorkItemHandlerCommand( workItemName, handler );
+ }
+
public Command newQuery(String identifier, String name) {
return new QueryCommand(identifier, name, null);
}
@@ -198,10 +205,15 @@ public BatchExecutionCommand newBatchExecution(List<? extends Command> commands,
return new BatchExecutionCommandImpl((List<GenericCommand<?>>) commands, lookup);
}
+ @Deprecated
public Command newKBuilderSetPropertyCommand(String id, String name, String value) {
return new KBuilderSetPropertyCommand(id, name, value);
}
+ public Command newKnowledgeBuilderSetPropertyCommand(String id, String name, String value) {
+ return new KBuilderSetPropertyCommand(id, name, value);
+ }
+
public Command newNewKnowledgeBuilderConfigurationCommand(String localId){
return new NewKnowledgeBuilderConfigurationCommand(localId);
}
@@ -214,4 +226,6 @@ public Command<FactHandle> fromExternalFactHandleCommand(String factHandleExtern
boolean disconnected) {
return new FromExternalFactHandleCommand(factHandleExternalForm, disconnected);
}
+
+
}
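As a rough usage sketch (not part of the patch), the new factory method can be applied outside the test above roughly as follows; the handler body, the "wihandler" name and the class name are placeholders, and the session is assumed to come from a knowledge base built as in RegisterWorkItemHandlerTest.

import org.drools.command.CommandFactory;
import org.drools.runtime.StatelessKnowledgeSession;
import org.drools.runtime.process.WorkItem;
import org.drools.runtime.process.WorkItemHandler;
import org.drools.runtime.process.WorkItemManager;

public class RegisterHandlerExample {

    // Registers a handler for work items named "wihandler" so that rules which
    // execute such work items during this call are routed to it.
    public static void register(StatelessKnowledgeSession session) {
        WorkItemHandler handler = new WorkItemHandler() {
            public void executeWorkItem(WorkItem workItem, WorkItemManager manager) {
                // Application-specific work would go here; mark the item as done.
                manager.completeWorkItem(workItem.getId(), null);
            }

            public void abortWorkItem(WorkItem workItem, WorkItemManager manager) {
                // Nothing to clean up in this sketch.
            }
        };
        session.execute(CommandFactory.newRegisterWorkItemHandlerCommand(handler, "wihandler"));
    }
}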
|
5a91d607882e59a6255eff0f144a6efecc749af2
|
spring-framework
|
Allow setting WSDL document as a Resource. Prior to this change, LocalJaxWsServiceFactory allowed specifying a WSDL document URL. Now users may also specify a WSDL document represented as a Spring Resource object for convenience. Issue: SPR-9909
|
a
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-web/src/main/java/org/springframework/remoting/jaxws/LocalJaxWsServiceFactory.java b/spring-web/src/main/java/org/springframework/remoting/jaxws/LocalJaxWsServiceFactory.java
index 3e8cf74b9024..7f95b3acd651 100644
--- a/spring-web/src/main/java/org/springframework/remoting/jaxws/LocalJaxWsServiceFactory.java
+++ b/spring-web/src/main/java/org/springframework/remoting/jaxws/LocalJaxWsServiceFactory.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2010 the original author or authors.
+ * Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,12 +16,14 @@
package org.springframework.remoting.jaxws;
+import java.io.IOException;
import java.net.URL;
import java.util.concurrent.Executor;
import javax.xml.namespace.QName;
import javax.xml.ws.Service;
import javax.xml.ws.handler.HandlerResolver;
+import org.springframework.core.io.Resource;
import org.springframework.util.Assert;
/**
@@ -53,11 +55,22 @@ public class LocalJaxWsServiceFactory {
/**
* Set the URL of the WSDL document that describes the service.
+ * @see #setWsdlDocumentResource(Resource)
*/
public void setWsdlDocumentUrl(URL wsdlDocumentUrl) {
this.wsdlDocumentUrl = wsdlDocumentUrl;
}
+ /**
+ * Set the WSDL document URL as a {@link Resource}.
+ * @throws IOException
+ * @since 3.2
+ */
+ public void setWsdlDocumentResource(Resource wsdlDocumentResource) throws IOException {
+ Assert.notNull(wsdlDocumentResource, "WSDL Resource must not be null.");
+ this.wsdlDocumentUrl = wsdlDocumentResource.getURL();
+ }
+
/**
* Return the URL of the WSDL document that describes the service.
*/
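As a rough usage sketch (not from the commit), the new setter accepts any Spring Resource; the classpath location below is a placeholder and the remaining factory properties are omitted.

import java.io.IOException;

import org.springframework.core.io.ClassPathResource;
import org.springframework.remoting.jaxws.LocalJaxWsServiceFactory;

public class WsdlResourceExample {

    public static LocalJaxWsServiceFactory newFactory() throws IOException {
        LocalJaxWsServiceFactory factory = new LocalJaxWsServiceFactory();
        // Resolved to a URL internally; any Resource implementation will do.
        factory.setWsdlDocumentResource(new ClassPathResource("META-INF/wsdl/OrderService.wsdl"));
        // Service name and namespace URI would typically be set as well before use.
        return factory;
    }
}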
|
6edd54805c2d558802b22a232ea1a751f82db029
|
orientdb
|
Changed log level to debug
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/common/concur/lock/OAdaptiveLock.java b/core/src/main/java/com/orientechnologies/common/concur/lock/OAdaptiveLock.java
index adf7f4b1587..c1bf5ff3e0d 100755
--- a/core/src/main/java/com/orientechnologies/common/concur/lock/OAdaptiveLock.java
+++ b/core/src/main/java/com/orientechnologies/common/concur/lock/OAdaptiveLock.java
@@ -129,7 +129,7 @@ public void close() {
if (lock.isLocked())
lock.unlock();
} catch (Exception e) {
- OLogManager.instance().error(this, "Can not unlock lock", e);
+ OLogManager.instance().debug(this, "Cannot unlock a lock", e);
}
}
diff --git a/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/OChannel.java b/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/OChannel.java
index 723e54ac98e..b3bb39f60b1 100755
--- a/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/OChannel.java
+++ b/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/OChannel.java
@@ -149,7 +149,7 @@ public synchronized void close() {
socket = null;
}
} catch (Exception e) {
- OLogManager.instance().error(this, "Error during socket close", e);
+ OLogManager.instance().debug(this, "Error during socket close", e);
}
try {
@@ -158,7 +158,7 @@ public synchronized void close() {
inStream = null;
}
} catch (Exception e) {
- OLogManager.instance().error(this, "Error during closing of input stream", e);
+ OLogManager.instance().debug(this, "Error during closing of input stream", e);
}
try {
@@ -167,14 +167,14 @@ public synchronized void close() {
outStream = null;
}
} catch (Exception e) {
- OLogManager.instance().error(this, "Error during closing of output stream", e);
+ OLogManager.instance().debug(this, "Error during closing of output stream", e);
}
for (OChannelListener l : getListenersCopy())
try {
l.onChannelClose(this);
} catch (Exception e) {
- OLogManager.instance().error(this, "Error during closing of channel close listener", e);
+ OLogManager.instance().debug(this, "Error during closing of channel close listener", e);
}
lockRead.close();
diff --git a/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/binary/OChannelBinary.java b/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/binary/OChannelBinary.java
index 2eb8aca4ae8..ea44a463163 100755
--- a/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/binary/OChannelBinary.java
+++ b/enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/binary/OChannelBinary.java
@@ -398,7 +398,7 @@ public void close() {
// in = null;
}
} catch (IOException e) {
- OLogManager.instance().error(this, "Error during closing of input stream", e);
+ OLogManager.instance().debug(this, "Error during closing of input stream", e);
}
try {
@@ -407,7 +407,7 @@ public void close() {
// out = null;
}
} catch (IOException e) {
- OLogManager.instance().error(this, "Error during closing of output stream", e);
+ OLogManager.instance().debug(this, "Error during closing of output stream", e);
}
super.close();
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java
index e9a0fe5fe12..db52df97f9f 100755
--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java
@@ -396,7 +396,7 @@ protected void handleConnectionError(final OChannelBinaryServer channel, final T
try {
channel.flush();
} catch (IOException e1) {
- OLogManager.instance().error(this, "Error during channel flush", e1);
+ OLogManager.instance().debug(this, "Error during channel flush", e1);
}
}
|
6ceadd85dc54ab7e5a58cf51d50289055f3d6bb9
|
hbase
|
HBASE-3387 Pair does not deep check arrays for equality. git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1053484 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index afe42f6b85f0..fa3639e56a35 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -40,6 +40,8 @@ Release 0.91.0 - Unreleased
function
HBASE-3260 Coprocessors: Add explicit lifecycle management
HBASE-3377 Upgrade Jetty to 6.1.26
+ HBASE-3387 Pair does not deep check arrays for equality
+ (Jesse Yates via Stack)
NEW FEATURES
diff --git a/src/main/java/org/apache/hadoop/hbase/util/Pair.java b/src/main/java/org/apache/hadoop/hbase/util/Pair.java
index ff296b6bda2b..a8779ce56bff 100644
--- a/src/main/java/org/apache/hadoop/hbase/util/Pair.java
+++ b/src/main/java/org/apache/hadoop/hbase/util/Pair.java
@@ -21,6 +21,7 @@
package org.apache.hadoop.hbase.util;
import java.io.Serializable;
+import java.lang.reflect.Array;
/**
* A generic class for pairs.
@@ -87,9 +88,35 @@ public T2 getSecond()
return second;
}
- private static boolean equals(Object x, Object y)
- {
- return (x == null && y == null) || (x != null && x.equals(y));
+ private static boolean equals(Object x, Object y) {
+ if (x == null && y == null)
+ return true;
+
+ if (x != null && y != null) {
+ if (x.getClass().equals(y.getClass())) {
+ if (x.getClass().isArray() && y.getClass().isArray()) {
+
+ int len = Array.getLength(x) == Array.getLength(y) ? Array
+ .getLength(x) : -1;
+ if (len < 0)
+ return false;
+
+ for (int i = 0; i < len; i++) {
+
+ Object xi = Array.get(x, i);
+ Object yi = Array.get(y, i);
+
+ if (!xi.equals(yi))
+ return false;
+ }
+ return true;
+ } else {
+ return x.equals(y);
+ }
+ }
+ }
+ return false;
+
}
@Override
diff --git a/src/test/java/org/apache/hadoop/hbase/util/TestPairEquals.java b/src/test/java/org/apache/hadoop/hbase/util/TestPairEquals.java
new file mode 100644
index 000000000000..d546cf589939
--- /dev/null
+++ b/src/test/java/org/apache/hadoop/hbase/util/TestPairEquals.java
@@ -0,0 +1,79 @@
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Test;
+
+/**
+ * Testing Testing {@link Pair#equals(Object)} for deep checking of arrays
+ */
+public class TestPairEquals {
+
+ /**
+ * Testing {@link Pair#equals(Object)} for deep checking of arrays
+ */
+ @Test
+ public void testEquals() {
+ Pair<String, String> p1 = new Pair<String, String>("Hello", "World");
+ Pair<String, String> p1a = new Pair<String, String>("Hello", "World");
+ assertTrue(p1.equals(p1a));
+
+ Pair<String, byte[]> p2 = new Pair<String, byte[]>("Hello", new byte[] { 1,
+ 0, 5 });
+ Pair<String, byte[]> p2a = new Pair<String, byte[]>("Hello", new byte[] {
+ 1, 0, 5 });
+ // Previously this test would fail as they are two different pointers to
+ // arrays that inherently the same.
+ assertTrue(p2.equals(p2a));
+
+ Pair<char[], String> p3 = new Pair<char[], String>(new char[] { 'h', 'e' },
+ "world");
+ assertTrue(p3.equals(p3));
+
+ // These kinds of tests will still fail as they have fundamentally different
+ // elements
+ Pair<Character[], String> p4 = new Pair<Character[], String>(
+ new Character[] { new Character('h'), new Character('e') }, "world");
+ // checking for autoboxing non-equality to the original class
+ assertFalse(p3.equals(p4));
+
+ // still fail for a different autoboxing situation (just to prove that it
+ // is not just chars)
+ Pair<String, Integer[]> p5 = new Pair<String, Integer[]>("hello",
+ new Integer[] { new Integer(1), new Integer(982) });
+ Pair<String, int[]> p5a = new Pair<String, int[]>("hello", new int[] { 1,
+ 982 });
+ assertFalse(p5.equals(p5a));
+
+ // will still fail for that different things
+ Pair<String, byte[]> p6 = new Pair<String, byte[]>("Hello", new byte[] { 1,
+ 0, 4 });
+ assertFalse(p2.equals(p6));
+
+ // will still fail for the other predicate being different
+ Pair<String, byte[]> p7 = new Pair<String, byte[]>("World", new byte[] { 1,
+ 0, 5 });
+ assertFalse(p2.equals(p7));
+ }
+}
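A small sketch (not part of the patch) of the behavioural change the test above exercises: pairs holding equal but distinct arrays now compare equal.

import org.apache.hadoop.hbase.util.Pair;

public class PairEqualsExample {

    public static void main(String[] args) {
        Pair<String, byte[]> a = new Pair<String, byte[]>("Hello", new byte[] { 1, 0, 5 });
        Pair<String, byte[]> b = new Pair<String, byte[]>("Hello", new byte[] { 1, 0, 5 });
        // Before this change the byte[] fields were compared by reference, so this printed false.
        System.out.println(a.equals(b)); // now true
    }
}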
|
e3665e19679795274e48a1d575ab08bc6e4d706c
|
hbase
|
HBASE-1142 Cleanup thrift server; remove Text and profuse DEBUG messaging. git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@736495 13f79535-47bb-0310-9956-ffa450edef68
|
p
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index 7ae14a42862f..638d69cf6ef9 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -21,6 +21,8 @@ Release 0.20.0 - Unreleased
part 2, and part 3 (Evgeny Ryabitskiy via Stack)
HBASE-896 Update jruby from 1.1.2 to 1.1.6
HBASE-1031 Add the Zookeeper jar
+ HBASE-1142 Cleanup thrift server; remove Text and profuse DEBUG messaging
+ (Tim Sell via Stack)
Release 0.19.0 - Unreleased
INCOMPATIBLE CHANGES
diff --git a/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java b/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
index 0d25e2c21289..046d819ca889 100644
--- a/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
+++ b/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase.thrift;
import java.io.IOException;
-import java.nio.charset.MalformedInputException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -53,7 +52,6 @@
import org.apache.hadoop.hbase.thrift.generated.TCell;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.Text;
import com.facebook.thrift.TException;
import com.facebook.thrift.protocol.TBinaryProtocol;
@@ -93,7 +91,7 @@ public static class HBaseHandler implements Hbase.Iface {
*/
protected HTable getTable(final byte[] tableName) throws IOError,
IOException {
- return new HTable(this.conf, getText(tableName));
+ return new HTable(this.conf, tableName);
}
/**
@@ -141,33 +139,7 @@ protected synchronized Scanner removeScanner(int id) {
scannerMap = new HashMap<Integer, Scanner>();
}
- /**
- * Converts a byte array to a Text object after validating the UTF-8
- * encoding.
- *
- * @param buf
- * UTF-8 encoded bytes
- * @return Text object
- * @throws IllegalArgument
- * @throws IOError
- */
- byte [] getText(byte[] buf) throws IOError {
- try {
- Text.validateUTF8(buf);
- } catch (MalformedInputException e) {
- throw new IOError("invalid UTF-8 encoding in row or column name");
- }
- return buf;
- }
-
- //
- // The Thrift Hbase.Iface interface is implemented below.
- // Documentation for the methods and datastructures is the Hbase.thrift file
- // used to generate the interface.
- //
-
public void enableTable(final byte[] tableName) throws IOError {
- LOG.debug("enableTable");
try{
admin.enableTable(tableName);
} catch (IOException e) {
@@ -176,7 +148,6 @@ public void enableTable(final byte[] tableName) throws IOError {
}
public void disableTable(final byte[] tableName) throws IOError{
- LOG.debug("disableTable");
try{
admin.disableTable(tableName);
} catch (IOException e) {
@@ -185,7 +156,6 @@ public void disableTable(final byte[] tableName) throws IOError{
}
public boolean isTableEnabled(final byte[] tableName) throws IOError {
- LOG.debug("isTableEnabled");
try {
return HTable.isTableEnabled(tableName);
} catch (IOException e) {
@@ -194,7 +164,6 @@ public boolean isTableEnabled(final byte[] tableName) throws IOError {
}
public List<byte[]> getTableNames() throws IOError {
- LOG.debug("getTableNames");
try {
HTableDescriptor[] tables = this.admin.listTables();
ArrayList<byte[]> list = new ArrayList<byte[]>(tables.length);
@@ -210,8 +179,6 @@ public List<byte[]> getTableNames() throws IOError {
public List<TRegionInfo> getTableRegions(byte[] tableName)
throws IOError {
try{
- LOG.debug("getTableRegions: " + new String(tableName));
-
HTable table = getTable(tableName);
Map<HRegionInfo, HServerAddress> regionsInfo = table.getRegionsInfo();
List<TRegionInfo> regions = new ArrayList<TRegionInfo>();
@@ -233,13 +200,9 @@ public List<TRegionInfo> getTableRegions(byte[] tableName)
public TCell get(byte[] tableName, byte[] row, byte[] column)
throws NotFound, IOError {
- if (LOG.isDebugEnabled()) {
- LOG.debug("get: table=" + new String(tableName) + ", row="
- + new String(row) + ", col=" + new String(column));
- }
try {
HTable table = getTable(tableName);
- Cell cell = table.get(getText(row), getText(column));
+ Cell cell = table.get(row, column);
if (cell == null) {
throw new NotFound();
}
@@ -251,15 +214,10 @@ public TCell get(byte[] tableName, byte[] row, byte[] column)
public List<TCell> getVer(byte[] tableName, byte[] row,
byte[] column, int numVersions) throws IOError, NotFound {
- if (LOG.isDebugEnabled()) {
- LOG.debug("getVer: table=" + new String(tableName) + ", row="
- + new String(row) + ", col=" + new String(column) + ", numVers="
- + numVersions);
- }
try {
HTable table = getTable(tableName);
Cell[] cells =
- table.get(getText(row), getText(column), numVersions);
+ table.get(row, column, numVersions);
if (cells == null) {
throw new NotFound();
}
@@ -276,15 +234,9 @@ public List<TCell> getVer(byte[] tableName, byte[] row,
public List<TCell> getVerTs(byte[] tableName, byte[] row,
byte[] column, long timestamp, int numVersions) throws IOError,
NotFound {
- if (LOG.isDebugEnabled()) {
- LOG.debug("getVerTs: table=" + new String(tableName) + ", row="
- + new String(row) + ", col=" + new String(column) + ", ts="
- + timestamp + ", numVers=" + numVersions);
- }
try {
HTable table = getTable(tableName);
- Cell[] cells = table.get(getText(row),
- getText(column), timestamp, numVersions);
+ Cell[] cells = table.get(row, column, timestamp, numVersions);
if (cells == null) {
throw new NotFound();
}
@@ -318,18 +270,14 @@ public TRowResult getRowTs(byte[] tableName, byte[] row,
public TRowResult getRowWithColumnsTs(byte[] tableName, byte[] row,
List<byte[]> columns, long timestamp) throws IOError {
- if (LOG.isDebugEnabled()) {
- LOG.debug("getRowTs: table=" + new String(tableName) + ", row="
- + new String(row) + ", ts=" + timestamp);
- }
try {
HTable table = getTable(tableName);
if (columns == null) {
- return ThriftUtilities.rowResultFromHBase(table.getRow(getText(row),
+ return ThriftUtilities.rowResultFromHBase(table.getRow(row,
timestamp));
} else {
byte[][] columnArr = columns.toArray(new byte[columns.size()][]);
- return ThriftUtilities.rowResultFromHBase(table.getRow(getText(row),
+ return ThriftUtilities.rowResultFromHBase(table.getRow(row,
columnArr, timestamp));
}
} catch (IOException e) {
@@ -344,14 +292,9 @@ public void deleteAll(byte[] tableName, byte[] row, byte[] column)
public void deleteAllTs(byte[] tableName, byte[] row, byte[] column,
long timestamp) throws IOError {
- if (LOG.isDebugEnabled()) {
- LOG.debug("deleteAllTs: table=" + new String(tableName) + ", row="
- + new String(row) + ", col=" + new String(column) + ", ts="
- + timestamp);
- }
try {
HTable table = getTable(tableName);
- table.deleteAll(getText(row), getText(column), timestamp);
+ table.deleteAll(row, column, timestamp);
} catch (IOException e) {
throw new IOError(e.getMessage());
}
@@ -363,13 +306,9 @@ public void deleteAllRow(byte[] tableName, byte[] row) throws IOError {
public void deleteAllRowTs(byte[] tableName, byte[] row, long timestamp)
throws IOError {
- if (LOG.isDebugEnabled()) {
- LOG.debug("deleteAllRowTs: table=" + new String(tableName) + ", row="
- + new String(row) + ", ts=" + timestamp);
- }
try {
HTable table = getTable(tableName);
- table.deleteAll(getText(row), timestamp);
+ table.deleteAll(row, timestamp);
} catch (IOException e) {
throw new IOError(e.getMessage());
}
@@ -378,21 +317,14 @@ public void deleteAllRowTs(byte[] tableName, byte[] row, long timestamp)
public void createTable(byte[] tableName,
List<ColumnDescriptor> columnFamilies) throws IOError,
IllegalArgument, AlreadyExists {
- if (LOG.isDebugEnabled()) {
- LOG.debug("createTable: table=" + new String(tableName));
- }
try {
- byte [] tableStr = getText(tableName);
- if (admin.tableExists(tableStr)) {
+ if (admin.tableExists(tableName)) {
throw new AlreadyExists("table name already in use");
}
- HTableDescriptor desc = new HTableDescriptor(tableStr);
+ HTableDescriptor desc = new HTableDescriptor(tableName);
for (ColumnDescriptor col : columnFamilies) {
HColumnDescriptor colDesc = ThriftUtilities.colDescFromThrift(col);
desc.addFamily(colDesc);
- if (LOG.isDebugEnabled()) {
- LOG.debug("createTable: col=" + new String(colDesc.getName()));
- }
}
admin.createTable(desc);
} catch (IOException e) {
@@ -407,11 +339,10 @@ public void deleteTable(byte[] tableName) throws IOError, NotFound {
LOG.debug("deleteTable: table=" + new String(tableName));
}
try {
- byte [] tableStr = getText(tableName);
- if (!admin.tableExists(tableStr)) {
+ if (!admin.tableExists(tableName)) {
throw new NotFound();
}
- admin.deleteTable(tableStr);
+ admin.deleteTable(tableName);
} catch (IOException e) {
throw new IOError(e.getMessage());
}
@@ -424,29 +355,15 @@ public void mutateRow(byte[] tableName, byte[] row,
public void mutateRowTs(byte[] tableName, byte[] row,
List<Mutation> mutations, long timestamp) throws IOError, IllegalArgument {
- if (LOG.isDebugEnabled()) {
- LOG.debug("mutateRowTs: table=" + new String(tableName) + ", row="
- + new String(row) + ", ts=" + timestamp + " mutations="
- + mutations.size());
- for (Mutation m : mutations) {
- if (m.isDelete) {
- LOG.debug("mutateRowTs: : delete - " + new String(getText(m.column)));
- } else {
- LOG.debug("mutateRowTs: : put - " + new String(getText(m.column)) + " => "
- + new String(m.value));
- }
- }
- }
-
HTable table = null;
try {
table = getTable(tableName);
- BatchUpdate batchUpdate = new BatchUpdate(getText(row), timestamp);
+ BatchUpdate batchUpdate = new BatchUpdate(row, timestamp);
for (Mutation m : mutations) {
if (m.isDelete) {
- batchUpdate.delete(getText(m.column));
+ batchUpdate.delete(m.column);
} else {
- batchUpdate.put(getText(m.column), m.value);
+ batchUpdate.put(m.column, m.value);
}
}
table.commit(batchUpdate);
@@ -466,30 +383,15 @@ public void mutateRowsTs(byte[] tableName, List<BatchMutation> rowBatches, long
throws IOError, IllegalArgument, TException {
List<BatchUpdate> batchUpdates = new ArrayList<BatchUpdate>();
- if (LOG.isDebugEnabled()) {
- LOG.debug("mutateRowsTs: table=" + new String(tableName) + ", rows="
- + rowBatches.size() + ", ts=" + timestamp);
- }
for (BatchMutation batch : rowBatches) {
byte[] row = batch.row;
List<Mutation> mutations = batch.mutations;
- if (LOG.isDebugEnabled()) {
- LOG.debug("mutateRowsTs: : row=" + new String(row) + " mutations=" + mutations.size());
- for (Mutation m : mutations) {
- if (m.isDelete) {
- LOG.debug("mutateRowsTs: : delete - " + new String(getText(m.column)));
- } else {
- LOG.debug("mutateRowsTs: : put - " + new String(getText(m.column)) + " => "
- + new String(m.value));
- }
- }
- }
- BatchUpdate batchUpdate = new BatchUpdate(getText(row), timestamp);
+ BatchUpdate batchUpdate = new BatchUpdate(row, timestamp);
for (Mutation m : mutations) {
if (m.isDelete) {
- batchUpdate.delete(getText(m.column));
+ batchUpdate.delete(m.column);
} else {
- batchUpdate.put(getText(m.column), m.value);
+ batchUpdate.put(m.column, m.value);
}
}
batchUpdates.add(batchUpdate);
@@ -539,18 +441,10 @@ public TRowResult scannerGet(int id) throws IllegalArgument, NotFound,
public int scannerOpen(byte[] tableName, byte[] startRow,
List<byte[]> columns) throws IOError {
- if (LOG.isDebugEnabled()) {
- LOG.debug("scannerOpen: table=" + new String(getText(tableName)) + ", start="
- + new String(getText(startRow)) + ", columns=" + columns.toString());
- }
try {
HTable table = getTable(tableName);
- byte [][] columnsText = new byte[columns.size()][];
- for (int i = 0; i < columns.size(); ++i) {
- columnsText[i] = getText(columns.get(i));
- }
- Scanner scanner = table.getScanner(columnsText,
- getText(startRow));
+ Scanner scanner = table.getScanner(columns.toArray(new byte[0][]),
+ startRow);
return addScanner(scanner);
} catch (IOException e) {
throw new IOError(e.getMessage());
@@ -559,19 +453,10 @@ public int scannerOpen(byte[] tableName, byte[] startRow,
public int scannerOpenWithStop(byte[] tableName, byte[] startRow,
byte[] stopRow, List<byte[]> columns) throws IOError, TException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("scannerOpen: table=" + new String(getText(tableName)) + ", start="
- + new String(getText(startRow)) + ", stop=" + new String(getText(stopRow)) + ", columns="
- + columns.toString());
- }
try {
HTable table = getTable(tableName);
- byte [][] columnsText = new byte[columns.size()][];
- for (int i = 0; i < columns.size(); ++i) {
- columnsText[i] = getText(columns.get(i));
- }
- Scanner scanner = table.getScanner(columnsText,
- getText(startRow), getText(stopRow));
+ Scanner scanner = table.getScanner(columns.toArray(new byte[0][]),
+ startRow, stopRow);
return addScanner(scanner);
} catch (IOException e) {
throw new IOError(e.getMessage());
@@ -580,19 +465,10 @@ public int scannerOpenWithStop(byte[] tableName, byte[] startRow,
public int scannerOpenTs(byte[] tableName, byte[] startRow,
List<byte[]> columns, long timestamp) throws IOError, TException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("scannerOpen: table=" + new String(getText(tableName)) + ", start="
- + new String(getText(startRow)) + ", columns=" + columns.toString()
- + ", timestamp=" + timestamp);
- }
try {
HTable table = getTable(tableName);
- byte [][] columnsText = new byte[columns.size()][];
- for (int i = 0; i < columns.size(); ++i) {
- columnsText[i] = getText(columns.get(i));
- }
- Scanner scanner = table.getScanner(columnsText,
- getText(startRow), timestamp);
+ Scanner scanner = table.getScanner(columns.toArray(new byte[0][]),
+ startRow, timestamp);
return addScanner(scanner);
} catch (IOException e) {
throw new IOError(e.getMessage());
@@ -602,19 +478,10 @@ public int scannerOpenTs(byte[] tableName, byte[] startRow,
public int scannerOpenWithStopTs(byte[] tableName, byte[] startRow,
byte[] stopRow, List<byte[]> columns, long timestamp)
throws IOError, TException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("scannerOpen: table=" + new String(getText(tableName)) + ", start="
- + new String(getText(startRow)) + ", stop=" + new String(getText(stopRow)) + ", columns="
- + columns.toString() + ", timestamp=" + timestamp);
- }
try {
HTable table = getTable(tableName);
- byte [][] columnsText = new byte[columns.size()][];
- for (int i = 0; i < columns.size(); ++i) {
- columnsText[i] = getText(columns.get(i));
- }
- Scanner scanner = table.getScanner(columnsText,
- getText(startRow), getText(stopRow), timestamp);
+ Scanner scanner = table.getScanner(columns.toArray(new byte[0][]),
+ startRow, stopRow, timestamp);
return addScanner(scanner);
} catch (IOException e) {
throw new IOError(e.getMessage());
@@ -623,9 +490,6 @@ public int scannerOpenWithStopTs(byte[] tableName, byte[] startRow,
public Map<byte[], ColumnDescriptor> getColumnDescriptors(
byte[] tableName) throws IOError, TException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("getColumnDescriptors: table=" + new String(tableName));
- }
try {
TreeMap<byte[], ColumnDescriptor> columns =
new TreeMap<byte[], ColumnDescriptor>(Bytes.BYTES_COMPARATOR);
@@ -669,7 +533,7 @@ private static void printUsageAndExit(final String message) {
}
/*
- * Start up the REST servlet in standalone mode.
+ * Start up the Thrift server.
* @param args
*/
protected static void doMain(final String [] args) throws Exception {
diff --git a/src/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java b/src/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
index 8c6112e0073a..dd8ac61db3d9 100644
--- a/src/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
+++ b/src/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.thrift;
-import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;
@@ -27,9 +26,7 @@
import org.apache.hadoop.hbase.io.Cell;
import org.apache.hadoop.hbase.io.RowResult;
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
-import org.apache.hadoop.hbase.thrift.generated.IOError;
import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
-import org.apache.hadoop.hbase.thrift.generated.NotFound;
import org.apache.hadoop.hbase.thrift.generated.TCell;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;
import org.apache.hadoop.hbase.util.Bytes;
|
42a3811fac0111978f65514801c080e48bee41fc
|
drools
|
[DROOLS-383] support for switch over String. The Eclipse compiler has a little-known documented configuration property "org.eclipse.jdt.core.compiler.compliance" which needs to be set along with the 'org.eclipse.jdt.core.compiler.source' and 'org.eclipse.jdt.core.compiler.codegen.targetPlatform' properties. This is going to work only when the underlying JVM is 7+.
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/drools/compiler/commons/jci/compilers/EclipseJavaCompilerSettings.java b/drools-compiler/src/main/java/org/drools/compiler/commons/jci/compilers/EclipseJavaCompilerSettings.java
index 834c8742481..77543461760 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/commons/jci/compilers/EclipseJavaCompilerSettings.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/commons/jci/compilers/EclipseJavaCompilerSettings.java
@@ -54,6 +54,7 @@ public final class EclipseJavaCompilerSettings extends JavaCompilerSettings {
public static final String CompilerOptions_OPTION_Encoding = "org.eclipse.jdt.core.encoding"; //$NON-NLS-1$
public static final String CompilerOptions_OPTION_Source = "org.eclipse.jdt.core.compiler.source"; //$NON-NLS-1$
public static final String CompilerOptions_OPTION_TargetPlatform = "org.eclipse.jdt.core.compiler.codegen.targetPlatform"; //$NON-NLS-1$
+ public static final String CompilerOptions_OPTION_Compliance = "org.eclipse.jdt.core.compiler.compliance"; //$NON-NLS-1$
public static final String CompilerOptions_OPTION_ReportDeprecation = "org.eclipse.jdt.core.compiler.problem.deprecation"; //$NON-NLS-1$
final private Map defaultEclipseSettings = new HashMap();
@@ -107,6 +108,7 @@ Map toNativeSettings() {
map.put(CompilerOptions_OPTION_ReportDeprecation, isDeprecations()?CompilerOptions_GENERATE:CompilerOptions_DO_NOT_GENERATE);
map.put(CompilerOptions_OPTION_TargetPlatform, toNativeVersion(getTargetVersion()));
map.put(CompilerOptions_OPTION_Source, toNativeVersion(getSourceVersion()));
+ map.put(CompilerOptions_OPTION_Compliance, toNativeVersion(getSourceVersion()));
map.put(CompilerOptions_OPTION_Encoding, getSourceEncoding());
return map;
diff --git a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/SwitchOverStringTest.java b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/SwitchOverStringTest.java
new file mode 100644
index 00000000000..dd3d7f87ce5
--- /dev/null
+++ b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/SwitchOverStringTest.java
@@ -0,0 +1,48 @@
+package org.drools.compiler.integrationtests;
+
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Test;
+import org.kie.api.io.ResourceType;
+import org.kie.internal.builder.KnowledgeBuilder;
+import org.kie.internal.builder.KnowledgeBuilderFactory;
+import org.kie.internal.io.ResourceFactory;
+
+public class SwitchOverStringTest {
+
+ private static final String FUNCTION_WITH_SWITCH_OVER_STRING = "function void theTest(String input) {\n" +
+ " switch(input) {\n" +
+ " case \"Hello World\" :" +
+ " System.out.println(\"yep\");\n" +
+ " break;\n" +
+ " default :\n" +
+ " System.out.println(\"uh\");\n" +
+ " break;\n" +
+ " }\n" +
+ "}";
+
+ @After
+ public void cleanUp() {
+ System.clearProperty("drools.dialect.java.compiler.lnglevel");
+ }
+
+ @Test
+ public void testCompileSwitchOverStringWithLngLevel17() {
+ double javaVersion = Double.valueOf(System.getProperty("java.specification.version"));
+ Assume.assumeTrue("Test only makes sense on Java 7+.", javaVersion >= 1.7);
+ System.setProperty("drools.dialect.java.compiler.lnglevel", "1.7");
+ KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
+ kbuilder.add(ResourceFactory.newByteArrayResource(FUNCTION_WITH_SWITCH_OVER_STRING.getBytes()), ResourceType.DRL);
+ Assert.assertFalse("Compilation error(s) occurred!", kbuilder.hasErrors());
+ }
+
+ @Test
+ public void testShouldFailToCompileSwitchOverStringWithLngLevel16() {
+ System.setProperty("drools.dialect.java.compiler.lnglevel", "1.6");
+ KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
+ kbuilder.add(ResourceFactory.newByteArrayResource(FUNCTION_WITH_SWITCH_OVER_STRING.getBytes()), ResourceType.DRL);
+ Assert.assertTrue("Compilation error(s) expected!", kbuilder.hasErrors());
+ }
+
+}
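A condensed sketch (not from the patch) of what the fix enables: with the Java dialect forced to language level 1.7 on a Java 7+ JVM, a DRL function may switch over a String. The DRL text is trimmed from the test above.

import org.kie.api.io.ResourceType;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderFactory;
import org.kie.internal.io.ResourceFactory;

public class SwitchOverStringExample {

    public static void main(String[] args) {
        System.setProperty("drools.dialect.java.compiler.lnglevel", "1.7");
        String drl = "function void greet(String input) {\n" +
                     "  switch (input) {\n" +
                     "    case \"Hello World\": System.out.println(\"yep\"); break;\n" +
                     "    default: System.out.println(\"uh\"); break;\n" +
                     "  }\n" +
                     "}";
        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add(ResourceFactory.newByteArrayResource(drl.getBytes()), ResourceType.DRL);
        if (kbuilder.hasErrors()) {
            throw new IllegalStateException(kbuilder.getErrors().toString());
        }
    }
}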
|
8d695a2f71b4c10c0b7cfff503c6bd64bd4ab8f8
|
orientdb
|
Issue 1404 Write cache speed improvements.
|
a
|
https://github.com/orientechnologies/orientdb
|
diff --git a/commons/src/main/java/com/orientechnologies/common/concur/lock/OLockManager.java b/commons/src/main/java/com/orientechnologies/common/concur/lock/OLockManager.java
old mode 100644
new mode 100755
index f982a5b26f4..0251c5f5489
--- a/commons/src/main/java/com/orientechnologies/common/concur/lock/OLockManager.java
+++ b/commons/src/main/java/com/orientechnologies/common/concur/lock/OLockManager.java
@@ -71,6 +71,49 @@ public void acquireLock(final REQUESTER_TYPE iRequester, final RESOURCE_TYPE iRe
acquireLock(iRequester, iResourceId, iLockType, acquireTimeout);
}
+ public boolean tryLock(final REQUESTER_TYPE iRequester, final RESOURCE_TYPE iResourceId, final LOCK iLockType) {
+ if (!enabled)
+ return false;
+
+ CountableLock lock;
+ final Object internalLock = internalLock(iResourceId);
+ synchronized (internalLock) {
+ lock = map.get(iResourceId);
+ if (lock == null) {
+ final CountableLock newLock = new CountableLock(false);
+ lock = map.putIfAbsent(getImmutableResourceId(iResourceId), newLock);
+ if (lock == null)
+ lock = newLock;
+ }
+ lock.countLocks++;
+ }
+
+ boolean result;
+ try {
+ if (iLockType == LOCK.SHARED)
+ result = lock.readLock().tryLock();
+ else
+ result = lock.writeLock().tryLock();
+ } catch (RuntimeException e) {
+ synchronized (internalLock) {
+ lock.countLocks--;
+ if (lock.countLocks == 0)
+ map.remove(iResourceId);
+ }
+ throw e;
+ }
+
+ if (!result) {
+ synchronized (internalLock) {
+ lock.countLocks--;
+ if (lock.countLocks == 0)
+ map.remove(iResourceId);
+ }
+ }
+
+ return result;
+ }
+
public void acquireLock(final REQUESTER_TYPE iRequester, final RESOURCE_TYPE iResourceId, final LOCK iLockType, long iTimeout) {
if (!enabled)
return;
@@ -119,6 +162,10 @@ public void acquireLock(final REQUESTER_TYPE iRequester, final RESOURCE_TYPE iRe
}
+ public void tryacquireLock(final REQUESTER_TYPE iRequester, final RESOURCE_TYPE iResourceId, final LOCK iLockType, long iTimeout) {
+
+ }
+
public void releaseLock(final REQUESTER_TYPE iRequester, final RESOURCE_TYPE iResourceId, final LOCK iLockType)
throws OLockException {
if (!enabled)
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/hashindex/local/cache/OWOWCache.java b/core/src/main/java/com/orientechnologies/orient/core/index/hashindex/local/cache/OWOWCache.java
index dbeef3ab6d5..7db1d9a2d07 100755
--- a/core/src/main/java/com/orientechnologies/orient/core/index/hashindex/local/cache/OWOWCache.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/hashindex/local/cache/OWOWCache.java
@@ -617,11 +617,18 @@ private int iterateBySubRing(NavigableMap<GroupKey, WriteGroup> subMap, int writ
final WriteGroup group = entry.getValue();
final GroupKey groupKey = entry.getKey();
+ if (group.recencyBit && group.creationTime - currentTime < groupTTL && !forceFlush) {
+ group.recencyBit = false;
+ continue;
+ }
+
lockManager.acquireLock(Thread.currentThread(), entry.getKey(), OLockManager.LOCK.EXCLUSIVE);
try {
if (group.recencyBit && group.creationTime - currentTime < groupTTL && !forceFlush)
group.recencyBit = false;
else {
+ group.recencyBit = false;
+
List<PageKey> lockedPages = new ArrayList<PageKey>();
for (int i = 0; i < 16; i++) {
final PageKey pageKey = new PageKey(groupKey.fileId, groupKey.groupIndex << 4 + i);
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/hashindex/local/cache/WriteGroup.java b/core/src/main/java/com/orientechnologies/orient/core/index/hashindex/local/cache/WriteGroup.java
index 6c9ff4c1ec1..5162197e449 100755
--- a/core/src/main/java/com/orientechnologies/orient/core/index/hashindex/local/cache/WriteGroup.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/hashindex/local/cache/WriteGroup.java
@@ -5,14 +5,13 @@
* @since 7/24/13
*/
class WriteGroup {
- public OCachePointer[] pages = new OCachePointer[16];
+ public OCachePointer[] pages = new OCachePointer[16];
- public boolean recencyBit;
- public long creationTime;
+ public volatile boolean recencyBit;
+ public final long creationTime;
WriteGroup(long creationTime) {
- this.creationTime = creationTime;
-
this.recencyBit = true;
+ this.creationTime = creationTime;
}
}
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateDocumentSpeedTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateDocumentSpeedTest.java
index cc08c82e095..24670918ed5 100755
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateDocumentSpeedTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/speed/LocalCreateDocumentSpeedTest.java
@@ -20,6 +20,7 @@
import org.testng.annotations.Test;
import com.orientechnologies.orient.core.Orient;
+import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
@@ -47,6 +48,9 @@ public LocalCreateDocumentSpeedTest() throws InstantiationException, IllegalAcce
@Override
@Test(enabled = false)
public void init() {
+ OGlobalConfiguration.USE_WAL.setValue(false);
+ OGlobalConfiguration.STORAGE_COMPRESSION_METHOD.setValue("nothing");
+
database = new ODatabaseDocumentTx(System.getProperty("url"));
if (database.exists()) {
database.open("admin", "admin");
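As a rough sketch (not part of the patch), the new non-blocking tryLock pairs with the existing releaseLock; the fragment below assumes it runs inside OWOWCache, where lockManager and a groupKey are already in scope.

// Fragment only: try to take the group lock without blocking; if another thread
// already holds it, skip this write group instead of waiting in acquireLock.
if (lockManager.tryLock(Thread.currentThread(), groupKey, OLockManager.LOCK.EXCLUSIVE)) {
    try {
        // ... flush the pages of this write group ...
    } finally {
        lockManager.releaseLock(Thread.currentThread(), groupKey, OLockManager.LOCK.EXCLUSIVE);
    }
} else {
    // Lock is busy; move on to the next group.
}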
|
6bc9677bbe919cd5bcadd8af2f5b0c838757a4ce
|
drools
|
[DROOLS-293] fix ObjectTypeNode id creation and comparison
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/core/reteoo/ObjectTypeNode.java b/drools-core/src/main/java/org/drools/core/reteoo/ObjectTypeNode.java
index 8a8f37d9cea..9756ad1485a 100644
--- a/drools-core/src/main/java/org/drools/core/reteoo/ObjectTypeNode.java
+++ b/drools-core/src/main/java/org/drools/core/reteoo/ObjectTypeNode.java
@@ -133,7 +133,7 @@ public ObjectTypeNode(final int id,
source,
context.getRuleBase().getConfiguration().getAlphaNodeHashingThreshold() );
this.objectType = objectType;
- idGenerator = new IdGenerator(objectType);
+ idGenerator = new IdGenerator(id);
setObjectMemoryEnabled( context.isObjectTypeNodeMemoryEnabled() );
@@ -145,15 +145,15 @@ public ObjectTypeNode(final int id,
}
private static class IdGenerator {
- private final Class<?> otnClass;
+ private final int otnId;
private int otnIdCounter;
- private IdGenerator(ObjectType objectType) {
- otnClass = objectType instanceof ClassObjectType ? ((ClassObjectType)objectType).getClassType() : Object.class;
+ private IdGenerator(int otnId) {
+ this.otnId = otnId;
}
private Id nextId() {
- return new Id(otnClass, otnIdCounter++);
+ return new Id(otnId, otnIdCounter++);
}
private void reset() {
@@ -161,21 +161,21 @@ private void reset() {
}
}
- public static Id DEFAULT_ID = new Id(Object.class, 0);
+ public static Id DEFAULT_ID = new Id(-1, 0);
public static class Id {
- private final Class<?> clazz;
+ private final int otnId;
private final int id;
- public Id(Class<?> clazz, int id) {
- this.clazz = clazz;
+ public Id(int otnId, int id) {
+ this.otnId = otnId;
this.id = id;
}
@Override
public String toString() {
- return "ObjectTypeNode.Id[" + clazz.getName() + "#" + id + "]";
+ return "ObjectTypeNode.Id[" + otnId + "#" + id + "]";
}
@Override
@@ -184,22 +184,16 @@ public boolean equals(Object o) {
if (o == null || !(o instanceof Id)) return false;
Id otherId = (Id) o;
- return id == otherId.id && clazz == otherId.clazz;
+ return id == otherId.id && otnId == otherId.otnId;
}
@Override
public int hashCode() {
- int result = clazz.hashCode();
- result = 31 * result + id;
- return result;
+ return 31 * otnId + 37 * id;
}
public boolean before(Id otherId) {
- return otherId != null && clazz == otherId.clazz && this.id < otherId.id;
- }
-
- public Class<?> getTypeNodeClass() {
- return clazz;
+ return otherId != null && ( otnId < otherId.otnId || ( otnId == otherId.otnId && id < otherId.id ) );
}
public int getId() {
@@ -217,12 +211,12 @@ public void readExternal(ObjectInput in) throws IOException,
if ( objectType instanceof ClassObjectType ) {
objectType = ((ReteooRuleBase) ((DroolsObjectInputStream) in).getRuleBase()).getClassFieldAccessorCache().getClassObjectType( (ClassObjectType) objectType );
}
- idGenerator = new IdGenerator(objectType);
objectMemoryEnabled = in.readBoolean();
expirationOffset = in.readLong();
queryNode = in.readBoolean();
dirty = true;
+ idGenerator = new IdGenerator(id);
}
public void writeExternal(ObjectOutput out) throws IOException {
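A small sketch (not part of the patch) of the ordering the rewritten Id now defines: ids are compared first by the owning ObjectTypeNode id, then by the per-node counter.

import org.drools.core.reteoo.ObjectTypeNode;

public class OtnIdOrderingExample {

    public static void main(String[] args) {
        ObjectTypeNode.Id a = new ObjectTypeNode.Id(3, 0);
        ObjectTypeNode.Id b = new ObjectTypeNode.Id(3, 1);
        ObjectTypeNode.Id c = new ObjectTypeNode.Id(5, 0);
        System.out.println(a.before(b)); // true: same node, smaller counter
        System.out.println(b.before(c)); // true: lower node id comes first regardless of counter
        System.out.println(c.before(a)); // false
    }
}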
|
abdc99e1d77335a0e401de20838f0914fcdc0242
|
kotlin
|
Removed doTest(char), since it was erroneous. If lookupString and tailText are both null, completionChar is ignored.
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/tests/org/jetbrains/jet/completion/handlers/CompletionHandlerTest.java b/idea/tests/org/jetbrains/jet/completion/handlers/CompletionHandlerTest.java
index 6b1cd1d467081..3a15672434c6c 100644
--- a/idea/tests/org/jetbrains/jet/completion/handlers/CompletionHandlerTest.java
+++ b/idea/tests/org/jetbrains/jet/completion/handlers/CompletionHandlerTest.java
@@ -80,7 +80,7 @@ public void testExtFunction() {
}
public void testFunctionLiteralInsertOnSpace() {
- doTest(' ');
+ doTest(CompletionType.BASIC, 2, null, null, ' ');
}
public void testInsertImportOnTab() {
@@ -108,11 +108,7 @@ public void testHigherOrderFunctionWithArg() {
}
public void doTest() {
- doTest('\n');
- }
-
- public void doTest(char completionChar) {
- doTest(CompletionType.BASIC, 2, null, null, completionChar);
+ doTest(CompletionType.BASIC, 2, null, null, '\n');
}
public void doTest(CompletionType type, int time, @Nullable String lookupString, @Nullable String tailText, char completionChar) {
|
804bf3ad226dca85c9fa66791bb63b794ab66b73
|
kotlin
|
Refactoring of AnnotationResolver.resolveAnnotation(s)Arguments
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/AnnotationResolver.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/AnnotationResolver.java
index 8a2e891bcf974..84ed9c4377f3e 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/AnnotationResolver.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/AnnotationResolver.java
@@ -144,7 +144,7 @@ private Annotations resolveAnnotationEntries(
descriptor = new LazyAnnotationDescriptor(new LazyAnnotationsContextImpl(this, storageManager, trace, scope), entryElement);
}
if (shouldResolveArguments) {
- resolveAnnotationArguments(entryElement, trace);
+ resolveAnnotationArguments(descriptor);
}
result.add(descriptor);
@@ -202,30 +202,13 @@ public OverloadResolutionResults<FunctionDescriptor> resolveAnnotationCall(
);
}
- public static void resolveAnnotationsArguments(@Nullable JetModifierList modifierList, @NotNull BindingTrace trace) {
- if (modifierList == null) {
- return;
- }
-
- for (JetAnnotationEntry annotationEntry : modifierList.getAnnotationEntries()) {
- resolveAnnotationArguments(annotationEntry, trace);
- }
- }
-
- public static void resolveAnnotationsArguments(@NotNull Annotations annotations, @NotNull BindingTrace trace) {
+ public static void resolveAnnotationsArguments(@NotNull Annotations annotations) {
for (AnnotationDescriptor annotationDescriptor : annotations) {
- JetAnnotationEntry annotationEntry = trace.getBindingContext().get(ANNOTATION_DESCRIPTOR_TO_PSI_ELEMENT, annotationDescriptor);
- assert annotationEntry != null : "Cannot find annotation entry: " + annotationDescriptor;
- resolveAnnotationArguments(annotationEntry, trace);
+ resolveAnnotationArguments(annotationDescriptor);
}
}
- private static void resolveAnnotationArguments(
- @NotNull JetAnnotationEntry annotationEntry,
- @NotNull BindingTrace trace
- ) {
- AnnotationDescriptor annotationDescriptor = trace.getBindingContext().get(BindingContext.ANNOTATION, annotationEntry);
- assert annotationDescriptor != null : "Annotation descriptor should be created before resolving arguments for " + annotationEntry.getText();
+ private static void resolveAnnotationArguments(@NotNull AnnotationDescriptor annotationDescriptor) {
if (annotationDescriptor instanceof LazyAnnotationDescriptor) {
((LazyAnnotationDescriptor) annotationDescriptor).forceResolveAllContents();
}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/BodyResolver.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/BodyResolver.java
index 92d9b51797f80..a8590f09f3a69 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/BodyResolver.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/BodyResolver.java
@@ -159,7 +159,7 @@ public void resolveSecondaryConstructorBody(
@NotNull final ConstructorDescriptor descriptor,
@NotNull JetScope declaringScope
) {
- AnnotationResolver.resolveAnnotationsArguments(constructor.getModifierList(), trace);
+ AnnotationResolver.resolveAnnotationsArguments(descriptor.getAnnotations());
final CallChecker callChecker = new ConstructorHeaderCallChecker(descriptor, additionalCheckerProvider.getCallChecker());
resolveFunctionBody(c, trace, constructor, descriptor, declaringScope,
@@ -504,14 +504,14 @@ private void resolvePrimaryConstructorParameters(@NotNull BodiesResolveContext c
JetClassOrObject klass = entry.getKey();
ClassDescriptorWithResolutionScopes classDescriptor = entry.getValue();
ConstructorDescriptor unsubstitutedPrimaryConstructor = classDescriptor.getUnsubstitutedPrimaryConstructor();
-
- AnnotationResolver.resolveAnnotationsArguments(klass.getPrimaryConstructorModifierList(), trace);
-
if (unsubstitutedPrimaryConstructor != null) {
+ AnnotationResolver.resolveAnnotationsArguments(unsubstitutedPrimaryConstructor.getAnnotations());
+
WritableScope parameterScope = getPrimaryConstructorParametersScope(classDescriptor.getScopeForClassHeaderResolution(),
unsubstitutedPrimaryConstructor);
- valueParameterResolver.resolveValueParameters(klass.getPrimaryConstructorParameters(), unsubstitutedPrimaryConstructor.getValueParameters(),
- parameterScope, c.getOuterDataFlowInfo(), trace);
+ valueParameterResolver.resolveValueParameters(klass.getPrimaryConstructorParameters(),
+ unsubstitutedPrimaryConstructor.getValueParameters(),
+ parameterScope, c.getOuterDataFlowInfo(), trace);
}
}
}
@@ -559,7 +559,7 @@ private void resolvePropertyDeclarationBodies(@NotNull BodiesResolveContext c) {
resolvePropertyDelegate(c, property, propertyDescriptor, delegateExpression, classDescriptor.getScopeForMemberDeclarationResolution(), propertyScope);
}
- resolveAnnotationArguments(propertyScope, property);
+ AnnotationResolver.resolveAnnotationsArguments(propertyDescriptor.getAnnotations());
resolvePropertyAccessors(c, property, propertyDescriptor);
processed.add(property);
@@ -587,7 +587,7 @@ private void resolvePropertyDeclarationBodies(@NotNull BodiesResolveContext c) {
resolvePropertyDelegate(c, property, propertyDescriptor, delegateExpression, propertyScope, propertyScope);
}
- resolveAnnotationArguments(propertyScope, property);
+ AnnotationResolver.resolveAnnotationsArguments(propertyDescriptor.getAnnotations());
resolvePropertyAccessors(c, property, propertyDescriptor);
}
@@ -610,7 +610,7 @@ public void resolvePropertyAccessors(
PropertyGetterDescriptor getterDescriptor = propertyDescriptor.getGetter();
if (getter != null && getterDescriptor != null) {
JetScope accessorScope = makeScopeForPropertyAccessor(c, getter, propertyDescriptor);
- resolveAnnotationArguments(accessorScope, getter);
+ AnnotationResolver.resolveAnnotationsArguments(getterDescriptor.getAnnotations());
resolveFunctionBody(c, fieldAccessTrackingTrace, getter, getterDescriptor, accessorScope);
}
@@ -618,7 +618,7 @@ public void resolvePropertyAccessors(
PropertySetterDescriptor setterDescriptor = propertyDescriptor.getSetter();
if (setter != null && setterDescriptor != null) {
JetScope accessorScope = makeScopeForPropertyAccessor(c, setter, propertyDescriptor);
- resolveAnnotationArguments(accessorScope, setter);
+ AnnotationResolver.resolveAnnotationsArguments(setterDescriptor.getAnnotations());
resolveFunctionBody(c, fieldAccessTrackingTrace, setter, setterDescriptor, accessorScope);
}
}
@@ -778,10 +778,6 @@ public void resolveConstructorParameterDefaultValuesAndAnnotations(
c.getOuterDataFlowInfo(), trace);
}
- private void resolveAnnotationArguments(@NotNull JetScope scope, @NotNull JetModifierListOwner owner) {
- AnnotationResolver.resolveAnnotationsArguments(owner.getModifierList(), trace);
- }
-
private static void computeDeferredType(JetType type) {
// handle type inference loop: function or property body contains a reference to itself
// fun f() = { f() }
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/LazyDeclarationResolver.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/LazyDeclarationResolver.java
index 273e7c4b80f5a..31492551d9e4a 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/LazyDeclarationResolver.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/lazy/LazyDeclarationResolver.java
@@ -211,7 +211,7 @@ public DeclarationDescriptor visitJetElement(@NotNull JetElement element, Void d
throw new IllegalStateException("No descriptor resolved for " + declaration + ":\n" +
PsiUtilPackage.getElementTextWithContext(declaration));
}
- AnnotationResolver.resolveAnnotationsArguments(result.getAnnotations(), trace);
+ AnnotationResolver.resolveAnnotationsArguments(result.getAnnotations());
return result;
}
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/FunctionsTypingVisitor.kt b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/FunctionsTypingVisitor.kt
index 78e512311b4f0..3075be4aa5185 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/FunctionsTypingVisitor.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/FunctionsTypingVisitor.kt
@@ -90,7 +90,7 @@ public class FunctionsTypingVisitor(facade: ExpressionTypingInternals) : Express
context.trace, context.dataFlowInfo, context.expectedType
)
}
- AnnotationResolver.resolveAnnotationsArguments(functionDescriptor.getAnnotations(), context.trace);
+ AnnotationResolver.resolveAnnotationsArguments(functionDescriptor.getAnnotations());
val functionInnerScope = FunctionDescriptorUtil.getFunctionInnerScope(context.scope, functionDescriptor, context.trace)
components.expressionTypingServices.checkFunctionReturnType(
@@ -167,7 +167,7 @@ public class FunctionsTypingVisitor(facade: ExpressionTypingInternals) : Express
initializeFunctionDescriptorAndExplicitReturnType(context.scope.getContainingDeclaration(), context.scope, functionLiteral,
functionDescriptor, context.trace, context.expectedType)
for (parameterDescriptor in functionDescriptor.getValueParameters()) {
- AnnotationResolver.resolveAnnotationsArguments(parameterDescriptor.getAnnotations(), context.trace)
+ AnnotationResolver.resolveAnnotationsArguments(parameterDescriptor.getAnnotations())
}
BindingContextUtils.recordFunctionDeclarationToDescriptor(context.trace, functionLiteral, functionDescriptor)
return functionDescriptor
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ValueParameterResolver.kt b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ValueParameterResolver.kt
index bae1179f575ea..d967702491f4b 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ValueParameterResolver.kt
+++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ValueParameterResolver.kt
@@ -49,7 +49,7 @@ public class ValueParameterResolver(
context: ExpressionTypingContext
) {
for ((descriptor, parameter) in valueParameterDescriptors zip valueParameters) {
- AnnotationResolver.resolveAnnotationsArguments(parameter.getModifierList(), context.trace)
+ AnnotationResolver.resolveAnnotationsArguments(descriptor.getAnnotations())
resolveDefaultValue(descriptor, parameter, context)
}
}
diff --git a/idea/ide-common/src/org/jetbrains/kotlin/resolve/lazy/ElementResolver.kt b/idea/ide-common/src/org/jetbrains/kotlin/resolve/lazy/ElementResolver.kt
index 2d93a071fa192..a8ccdd2ed2f2c 100644
--- a/idea/ide-common/src/org/jetbrains/kotlin/resolve/lazy/ElementResolver.kt
+++ b/idea/ide-common/src/org/jetbrains/kotlin/resolve/lazy/ElementResolver.kt
@@ -248,21 +248,21 @@ public abstract class ElementResolver protected(
val modifierList = jetAnnotationEntry.getParentOfType<JetModifierList>(true)
val declaration = modifierList?.getParentOfType<JetDeclaration>(true)
if (declaration != null) {
- doResolveAnnotations(resolveSession, getAnnotationsByDeclaration(resolveSession, modifierList!!, declaration))
+ doResolveAnnotations(getAnnotationsByDeclaration(resolveSession, modifierList!!, declaration))
}
else {
val fileAnnotationList = jetAnnotationEntry.getParentOfType<JetFileAnnotationList>(true)
if (fileAnnotationList != null) {
- doResolveAnnotations(resolveSession, resolveSession.getFileAnnotations(fileAnnotationList.getContainingJetFile()))
+ doResolveAnnotations(resolveSession.getFileAnnotations(fileAnnotationList.getContainingJetFile()))
}
if (modifierList != null && modifierList.getParent() is JetFile) {
- doResolveAnnotations(resolveSession, resolveSession.getDanglingAnnotations(modifierList.getContainingJetFile()))
+ doResolveAnnotations(resolveSession.getDanglingAnnotations(modifierList.getContainingJetFile()))
}
}
}
- private fun doResolveAnnotations(resolveSession: ResolveSession, annotations: Annotations) {
- AnnotationResolver.resolveAnnotationsArguments(annotations, resolveSession.getTrace())
+ private fun doResolveAnnotations(annotations: Annotations) {
+ AnnotationResolver.resolveAnnotationsArguments(annotations)
ForceResolveUtil.forceResolveAllContents(annotations)
}
|
d104b26a38fc5379dd01a2a61d865c4dc871cb55
|
hbase
|
HBASE-7715 FSUtils waitOnSafeMode can incorrectly loop on standby NN (Ted Yu). git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1440600 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 74a568896ace..31b596d84594 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -24,6 +24,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
+import java.lang.reflect.Method;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
@@ -238,6 +239,31 @@ public static void checkFileSystemAvailable(final FileSystem fs)
throw io;
}
+ /**
+ * We use reflection because {@link DistributedFileSystem#setSafeMode(
+ * FSConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1
+ *
+ * @param dfs
+ * @return whether we're in safe mode
+ * @throws IOException
+ */
+ private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {
+ boolean inSafeMode = false;
+ try {
+ Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class<?> []{
+ org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.class, boolean.class});
+ inSafeMode = (Boolean) m.invoke(dfs,
+ org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET, true);
+ } catch (Exception e) {
+ if (e instanceof IOException) throw (IOException) e;
+
+ // Check whether dfs is on safemode.
+ inSafeMode = dfs.setSafeMode(
+ org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET);
+ }
+ return inSafeMode;
+ }
+
/**
* Check whether dfs is in safemode.
* @param conf
@@ -249,8 +275,7 @@ public static void checkDfsSafeMode(final Configuration conf)
FileSystem fs = FileSystem.get(conf);
if (fs instanceof DistributedFileSystem) {
DistributedFileSystem dfs = (DistributedFileSystem)fs;
- // Check whether dfs is on safemode.
- isInSafeMode = dfs.setSafeMode(org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET);
+ isInSafeMode = isInSafeMode(dfs);
}
if (isInSafeMode) {
throw new IOException("File system is in safemode, it can't be written now");
@@ -622,7 +647,7 @@ public static void waitOnSafeMode(final Configuration conf,
if (!(fs instanceof DistributedFileSystem)) return;
DistributedFileSystem dfs = (DistributedFileSystem)fs;
// Make sure dfs is not in safe mode
- while (dfs.setSafeMode(org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET)) {
+ while (isInSafeMode(dfs)) {
LOG.info("Waiting for dfs to exit safe mode...");
try {
Thread.sleep(wait);
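The isInSafeMode helper introduced above follows a general compatibility pattern: look up the newer two-argument overload via reflection and, if it is absent on this runtime, fall back to the older single-argument call. A minimal, self-contained sketch of that pattern is below; ApiV1 and its check methods are hypothetical stand-ins, not Hadoop classes.

import java.lang.reflect.Method;

// Hedged sketch of a reflection-based API fallback; ApiV1 is a hypothetical
// class standing in for DistributedFileSystem, not a real Hadoop type.
class ApiV1 {
    // Older runtimes only expose this single-argument form.
    public boolean check(String mode) {
        return "GET".equals(mode);
    }
}

public class ReflectiveFallback {
    static boolean check(ApiV1 api) {
        try {
            // Prefer the newer two-argument overload if this runtime has it.
            Method m = ApiV1.class.getMethod("check", String.class, boolean.class);
            return (Boolean) m.invoke(api, "GET", true);
        } catch (NoSuchMethodException e) {
            // Newer overload absent: fall back to the old single-argument call.
            return api.check("GET");
        } catch (ReflectiveOperationException e) {
            throw new IllegalStateException(e);
        }
    }

    public static void main(String[] args) {
        System.out.println(check(new ApiV1())); // prints true via the fallback path
    }
}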
|
c029bfc5918c47a8c9a97e23bf94184ef964a017
|
drools
|
BZ-1074672: Fixing legacy API to properly handle resource configuration. (cherry picked from commit 645d0e5cb86225a898391960350588d1817b1de2)
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/core/builder/conf/impl/JaxbConfigurationImpl.java b/drools-core/src/main/java/org/drools/core/builder/conf/impl/JaxbConfigurationImpl.java
index ca1008fdaa9..abf9cec54ff 100644
--- a/drools-core/src/main/java/org/drools/core/builder/conf/impl/JaxbConfigurationImpl.java
+++ b/drools-core/src/main/java/org/drools/core/builder/conf/impl/JaxbConfigurationImpl.java
@@ -25,12 +25,12 @@
import java.util.List;
import java.util.Properties;
-import com.sun.tools.xjc.Language;
-import org.kie.internal.builder.JaxbConfiguration;
import org.kie.api.io.ResourceConfiguration;
+import org.kie.internal.builder.JaxbConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.sun.tools.xjc.Language;
import com.sun.tools.xjc.Options;
public class JaxbConfigurationImpl extends ResourceConfigurationImpl implements JaxbConfiguration {
@@ -70,6 +70,14 @@ public void setClasses(List<String> classes) {
this.classes = classes;
}
+ public void setSystemId(String systemId) {
+ this.systemId = systemId;
+ }
+
+ public void setXjcOpts(Options xjcOpts) {
+ this.xjcOpts = xjcOpts;
+ }
+
public byte[] toByteArray() {
ByteArrayOutputStream buf = new ByteArrayOutputStream();
try {
diff --git a/knowledge-api-legacy5-adapter/pom.xml b/knowledge-api-legacy5-adapter/pom.xml
index 2cea9dac988..467b1aa4252 100644
--- a/knowledge-api-legacy5-adapter/pom.xml
+++ b/knowledge-api-legacy5-adapter/pom.xml
@@ -28,6 +28,10 @@
<groupId>org.drools</groupId>
<artifactId>drools-reteoo</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.drools</groupId>
+ <artifactId>drools-decisiontables</artifactId>
+ </dependency>
<dependency>
<!-- External dependencies -->
<groupId>org.osgi</groupId>
diff --git a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/JaxbConfigurationImpl.java b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/JaxbConfigurationImpl.java
new file mode 100644
index 00000000000..652aff173c1
--- /dev/null
+++ b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/JaxbConfigurationImpl.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2010 JBoss Inc
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.drools.impl;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.drools.builder.JaxbConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.sun.tools.xjc.Options;
+
+public class JaxbConfigurationImpl implements JaxbConfiguration {
+ private final Logger logger = LoggerFactory.getLogger( JaxbConfigurationImpl.class );
+
+ private Options xjcOpts;
+ private String systemId;
+
+ private List<String> classes;
+
+ public JaxbConfigurationImpl() { }
+
+ public JaxbConfigurationImpl(Options xjcOpts,
+ String systemId) {
+ this.xjcOpts = xjcOpts;
+ this.systemId = systemId;
+ this.classes = new ArrayList<String>();
+ }
+
+
+ public Options getXjcOpts() {
+ return xjcOpts;
+ }
+
+
+ public String getSystemId() {
+ return systemId;
+ }
+
+
+ public List<String> getClasses() {
+ return classes;
+ }
+
+
+ public void setClasses(List<String> classes) {
+ this.classes = classes;
+ }
+
+ public void setSystemId(String systemId) {
+ this.systemId = systemId;
+ }
+
+ public void setXjcOpts(Options xjcOpts) {
+ this.xjcOpts = xjcOpts;
+ }
+}
diff --git a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderFactoryServiceImpl.java b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderFactoryServiceImpl.java
index 249683410d9..76a8385bd3e 100644
--- a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderFactoryServiceImpl.java
+++ b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderFactoryServiceImpl.java
@@ -1,5 +1,7 @@
package org.drools.impl;
+import java.util.Properties;
+
import org.drools.KnowledgeBase;
import org.drools.builder.DecisionTableConfiguration;
import org.drools.builder.JaxbConfiguration;
@@ -8,14 +10,10 @@
import org.drools.builder.KnowledgeBuilderFactoryService;
import org.drools.compiler.compiler.PackageBuilder;
import org.drools.compiler.compiler.PackageBuilderConfiguration;
-import org.drools.core.builder.conf.impl.JaxbConfigurationImpl;
import org.drools.core.impl.KnowledgeBaseImpl;
-
-import java.util.Properties;
+import org.drools.impl.adapters.KnowledgeBuilderConfigurationAdapter;
import com.sun.tools.xjc.Options;
-import org.drools.impl.adapters.JaxbConfigurationAdapter;
-import org.drools.impl.adapters.KnowledgeBuilderConfigurationAdapter;
public class KnowledgeBuilderFactoryServiceImpl implements KnowledgeBuilderFactoryService {
@@ -58,6 +56,6 @@ public KnowledgeBuilder newKnowledgeBuilder(KnowledgeBase kbase,
public JaxbConfiguration newJaxbConfiguration(Options xjcOpts,
String systemId) {
- return new JaxbConfigurationAdapter(new JaxbConfigurationImpl( xjcOpts, systemId ));
+ return new org.drools.impl.JaxbConfigurationImpl( xjcOpts, systemId );
}
}
diff --git a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderImpl.java b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderImpl.java
index bb3b0e9b541..9110fda9110 100644
--- a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderImpl.java
+++ b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderImpl.java
@@ -1,7 +1,14 @@
package org.drools.impl;
+import static org.drools.impl.adapters.AdapterUtil.adaptResultSeverity;
+import static org.drools.impl.adapters.KnowledgePackageAdapter.adaptKnowledgePackages;
+
+import java.util.Collection;
+
import org.drools.KnowledgeBase;
import org.drools.builder.CompositeKnowledgeBuilder;
+import org.drools.builder.DecisionTableConfiguration;
+import org.drools.builder.JaxbConfiguration;
import org.drools.builder.KnowledgeBuilder;
import org.drools.builder.KnowledgeBuilderErrors;
import org.drools.builder.KnowledgeBuilderResults;
@@ -11,17 +18,14 @@
import org.drools.compiler.compiler.PackageBuilder;
import org.drools.definition.KnowledgePackage;
import org.drools.impl.adapters.CompositeKnowledgeBuilderAdapter;
+import org.drools.impl.adapters.DecisionTableConfigurationAdapter;
+import org.drools.impl.adapters.JaxbConfigurationAdapter;
import org.drools.impl.adapters.KnowledgeBaseAdapter;
import org.drools.impl.adapters.KnowledgeBuilderErrorsAdapter;
import org.drools.impl.adapters.KnowledgeBuilderResultsAdapter;
import org.drools.impl.adapters.ResourceAdapter;
import org.drools.io.Resource;
-import java.util.Collection;
-
-import static org.drools.impl.adapters.AdapterUtil.adaptResultSeverity;
-import static org.drools.impl.adapters.KnowledgePackageAdapter.adaptKnowledgePackages;
-
public class KnowledgeBuilderImpl implements KnowledgeBuilder {
private final org.drools.compiler.builder.impl.KnowledgeBuilderImpl delegate;
@@ -35,7 +39,15 @@ public void add(Resource resource, ResourceType type) {
}
public void add(Resource resource, ResourceType type, ResourceConfiguration configuration) {
- delegate.add(((ResourceAdapter)resource).getDelegate(), type.toKieResourceType(), null);
+ org.kie.api.io.ResourceConfiguration conf = null;
+ if( configuration != null ) {
+ if( configuration instanceof DecisionTableConfiguration ) {
+ conf = new DecisionTableConfigurationAdapter( (DecisionTableConfiguration) configuration );
+ } else if( configuration instanceof JaxbConfiguration ) {
+ conf = new JaxbConfigurationAdapter((JaxbConfiguration) configuration);
+ }
+ }
+ delegate.add(((ResourceAdapter)resource).getDelegate(), type.toKieResourceType(), conf );
}
public Collection<KnowledgePackage> getKnowledgePackages() {
diff --git a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/DecisionTableConfigurationAdapter.java b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/DecisionTableConfigurationAdapter.java
new file mode 100644
index 00000000000..ce91dd23fb3
--- /dev/null
+++ b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/DecisionTableConfigurationAdapter.java
@@ -0,0 +1,60 @@
+package org.drools.impl.adapters;
+
+import java.util.Properties;
+
+import org.drools.core.builder.conf.impl.DecisionTableConfigurationImpl;
+import org.drools.core.builder.conf.impl.ResourceConfigurationImpl;
+import org.kie.api.io.ResourceConfiguration;
+import org.kie.api.io.ResourceType;
+import org.kie.internal.builder.DecisionTableConfiguration;
+import org.kie.internal.builder.DecisionTableInputType;
+
+
+public class DecisionTableConfigurationAdapter extends ResourceConfigurationImpl implements DecisionTableConfiguration {
+
+ private static final long serialVersionUID = -2052308765193190359L;
+
+ private final org.drools.builder.DecisionTableConfiguration delegate;
+
+ public DecisionTableConfigurationAdapter( org.drools.builder.DecisionTableConfiguration delegate ) {
+ super.setResourceType(ResourceType.DTABLE);
+ this.delegate = delegate;
+ }
+
+ public void setInputType(org.drools.builder.DecisionTableInputType inputType) {
+ delegate.setInputType(inputType);
+ }
+
+ public DecisionTableInputType getInputType() {
+ return delegate.getInputType() == org.drools.builder.DecisionTableInputType.CSV ? DecisionTableInputType.CSV : DecisionTableInputType.XLS;
+ }
+
+ public void setWorksheetName(String name) {
+ delegate.setWorksheetName(name);
+ }
+
+ public String getWorksheetName() {
+ return delegate.getWorksheetName();
+ }
+
+ public Properties toProperties() {
+ Properties prop = super.toProperties();
+ prop.setProperty( DecisionTableConfigurationImpl.DROOLS_DT_TYPE, getInputType().toString() );
+ if( getWorksheetName() != null ) {
+ prop.setProperty( DecisionTableConfigurationImpl.DROOLS_DT_WORKSHEET, getWorksheetName() );
+ }
+ return prop;
+ }
+
+ public ResourceConfiguration fromProperties(Properties prop) {
+ super.fromProperties(prop);
+ setInputType( DecisionTableInputType.valueOf( prop.getProperty( DecisionTableConfigurationImpl.DROOLS_DT_TYPE, DecisionTableInputType.XLS.toString() ) ) );
+ setWorksheetName( prop.getProperty( DecisionTableConfigurationImpl.DROOLS_DT_WORKSHEET, null ) );
+ return this;
+ }
+
+ @Override
+ public void setInputType(DecisionTableInputType inputType) {
+ delegate.setInputType( inputType == DecisionTableInputType.CSV ? org.drools.builder.DecisionTableInputType.CSV : org.drools.builder.DecisionTableInputType.XLS);
+ }
+}
diff --git a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/JaxbConfigurationAdapter.java b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/JaxbConfigurationAdapter.java
index ec6917d4327..1fda38ba3cd 100644
--- a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/JaxbConfigurationAdapter.java
+++ b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/JaxbConfigurationAdapter.java
@@ -1,15 +1,23 @@
package org.drools.impl.adapters;
-import com.sun.tools.xjc.Options;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+
+import org.drools.core.builder.conf.impl.JaxbConfigurationImpl;
+import org.drools.core.builder.conf.impl.ResourceConfigurationImpl;
+import org.kie.api.io.ResourceConfiguration;
import org.kie.internal.builder.JaxbConfiguration;
-import java.util.List;
+import com.sun.tools.xjc.Language;
+import com.sun.tools.xjc.Options;
-public class JaxbConfigurationAdapter implements org.drools.builder.JaxbConfiguration {
+public class JaxbConfigurationAdapter extends ResourceConfigurationImpl implements JaxbConfiguration {
- private final JaxbConfiguration delegate;
+ private static final long serialVersionUID = -1425447385459529502L;
+ private final org.drools.builder.JaxbConfiguration delegate;
- public JaxbConfigurationAdapter(JaxbConfiguration delegate) {
+ public JaxbConfigurationAdapter(org.drools.builder.JaxbConfiguration delegate) {
this.delegate = delegate;
}
@@ -24,4 +32,51 @@ public String getSystemId() {
public List<String> getClasses() {
return delegate.getClasses();
}
+
+ public Properties toProperties() {
+ Properties prop = super.toProperties();
+ prop.setProperty( "drools.jaxb.conf.systemId", getSystemId() );
+ prop.setProperty( "drools.jaxb.conf.classes", getClass().toString() );
+ Options xjcOpts = getXjcOpts();
+ if (xjcOpts != null) {
+ // how to serialize Options to a property file???
+ prop.setProperty( "drools.jaxb.conf.opts.class", xjcOpts.getClass().getName() );
+ if (xjcOpts.getSchemaLanguage() != null) {
+ prop.setProperty( "drools.jaxb.conf.opts.lang", xjcOpts.getSchemaLanguage().toString() );
+ }
+ }
+ return prop;
+ }
+
+ public ResourceConfiguration fromProperties(Properties prop) {
+ super.fromProperties(prop);
+ ((JaxbConfigurationImpl)delegate).setSystemId( prop.getProperty( "drools.jaxb.conf.systemId", null ) );
+ String classesStr = prop.getProperty( "drools.jaxb.conf.classes", "[]" );
+ classesStr = classesStr.substring( 1, classesStr.length()-1 ).trim();
+ List<String> classes = new ArrayList<String>();
+ if( classesStr != null && classesStr.length() > 1 ) {
+ // can't use Arrays.asList() because have to trim() each element
+ for( String clz : classesStr.split( "," ) ) {
+ classes.add( clz.trim() );
+ }
+ }
+ ((JaxbConfigurationImpl)delegate).setClasses(classes);
+
+ // how to deserialize Options from a properties file?
+ String optsClass = prop.getProperty( "drools.jaxb.conf.opts.class", null );
+ if (optsClass != null) {
+ try {
+ Options xjcOpts = (Options) Class.forName( optsClass ).newInstance();
+ String optsLang = prop.getProperty( "drools.jaxb.conf.opts.lang", null );
+ if (optsLang != null) {
+ xjcOpts.setSchemaLanguage( Language.valueOf(optsLang) );
+ }
+ ((JaxbConfigurationImpl)delegate).setXjcOpts(xjcOpts);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ return this;
+ }
}
diff --git a/knowledge-api-legacy5-adapter/src/test/java/org/drools/integrationtests/ResourceCompilationTest.java b/knowledge-api-legacy5-adapter/src/test/java/org/drools/integrationtests/ResourceCompilationTest.java
new file mode 100644
index 00000000000..f116afa75cd
--- /dev/null
+++ b/knowledge-api-legacy5-adapter/src/test/java/org/drools/integrationtests/ResourceCompilationTest.java
@@ -0,0 +1,42 @@
+package org.drools.integrationtests;
+
+import org.drools.builder.DecisionTableConfiguration;
+import org.drools.builder.DecisionTableInputType;
+import org.junit.Test;
+import org.drools.builder.KnowledgeBuilder;
+import org.drools.builder.KnowledgeBuilderFactory;
+import org.drools.builder.ResourceType;
+import org.drools.io.ResourceFactory;
+
+/**
+ * Illustrates knowledge-api resource compilation problems.
+ */
+public class ResourceCompilationTest {
+
+ @Test
+ public void testDecisionTableXls() {
+ DecisionTableConfiguration dtconf = KnowledgeBuilderFactory.newDecisionTableConfiguration();
+ dtconf.setInputType(DecisionTableInputType.XLS);
+
+ KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
+ kbuilder.add(ResourceFactory.newClassPathResource("sample.xls", getClass()), ResourceType.DTABLE, dtconf);
+
+ if (kbuilder.hasErrors()) {
+ throw new RuntimeException("Drools compile errors: " + kbuilder.getErrors().toString());
+ }
+ }
+
+ @Test
+ public void testDecisionTableCsv() {
+ DecisionTableConfiguration dtconf = KnowledgeBuilderFactory.newDecisionTableConfiguration();
+ dtconf.setInputType(DecisionTableInputType.CSV);
+
+ KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
+ kbuilder.add(ResourceFactory.newClassPathResource("sample.csv", getClass()), ResourceType.DTABLE, dtconf);
+
+ if (kbuilder.hasErrors()) {
+ throw new RuntimeException("Drools compile errors: " + kbuilder.getErrors().toString());
+ }
+ }
+
+}
diff --git a/knowledge-api-legacy5-adapter/src/test/java/org/drools/model/Person.java b/knowledge-api-legacy5-adapter/src/test/java/org/drools/model/Person.java
new file mode 100644
index 00000000000..8eabfec84fc
--- /dev/null
+++ b/knowledge-api-legacy5-adapter/src/test/java/org/drools/model/Person.java
@@ -0,0 +1,109 @@
+package org.drools.model;
+
+import java.io.Serializable;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * Sample fact for person.
+ */
+@XmlRootElement
+public class Person implements Serializable {
+
+ private static final long serialVersionUID = -5411807328989112195L;
+
+ private int id = 0;
+ private String name = "";
+ private int age;
+ private String likes;
+
+ public Person() {
+ }
+
+ public Person(String name) {
+ super();
+ this.name = name;
+ }
+
+ public Person(String name, int age) {
+ this.name = name;
+ this.age = age;
+ }
+
+ public Person(String name, String likes, int age) {
+ this.name = name;
+ this.likes = likes;
+ this.age = age;
+ }
+
+ public int getId() {
+ return id;
+ }
+
+ public void setId(int id) {
+ this.id = id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ @Override
+ public String toString() {
+ return String.format("%s[id='%s', name='%s']", getClass().getName(), id, name);
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + id;
+ result = prime * result + ((name == null) ? 0 : name.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
+ Person other = (Person) obj;
+ if (id != other.id) {
+ return false;
+ }
+ if (name == null) {
+ if (other.name != null) {
+ return false;
+ }
+ } else if (!name.equals(other.name)) {
+ return false;
+ }
+ return true;
+ }
+
+ public void setAge(int age) {
+ this.age = age;
+ }
+
+ public int getAge() {
+ return age;
+ }
+
+ public void setLikes(String likes) {
+ this.likes = likes;
+ }
+
+ public String getLikes() {
+ return likes;
+ }
+
+}
diff --git a/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.csv b/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.csv
new file mode 100644
index 00000000000..35931d14344
--- /dev/null
+++ b/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.csv
@@ -0,0 +1,13 @@
+,
+"RuleSet","org.drools.knowledgeapi"
+"Import","org.drools.model.Person"
+"Notes",
+,
+"RuleTable ID change",
+"CONDITION","ACTION"
+"person:Person","person"
+"id == $param","setId($param)"
+"ID","new ID"
+0,1
+1,2
+2,3
diff --git a/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.xls b/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.xls
new file mode 100644
index 00000000000..06090780a4b
Binary files /dev/null and b/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.xls differ
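The KnowledgeBuilderImpl change in this diff is essentially an instanceof-driven adapter dispatch: each legacy configuration type is wrapped in an adapter that implements the new API before being handed to the delegate, instead of being silently dropped. A small sketch of that shape follows; OldTableConfig, OldJaxbConfig and NewConfig are hypothetical interfaces standing in for the drools and kie types.

// Hedged sketch of adapter dispatch; the interfaces below are hypothetical,
// not the actual drools/kie API.
interface NewConfig { String describe(); }

interface OldTableConfig { String worksheet(); }
interface OldJaxbConfig { String systemId(); }

class TableAdapter implements NewConfig {
    private final OldTableConfig delegate;
    TableAdapter(OldTableConfig delegate) { this.delegate = delegate; }
    public String describe() { return "table:" + delegate.worksheet(); }
}

class JaxbAdapter implements NewConfig {
    private final OldJaxbConfig delegate;
    JaxbAdapter(OldJaxbConfig delegate) { this.delegate = delegate; }
    public String describe() { return "jaxb:" + delegate.systemId(); }
}

public class AdapterDispatch {
    // Mirrors the add(...) fix: wrap whichever legacy type was passed in.
    static NewConfig adapt(Object legacy) {
        if (legacy instanceof OldTableConfig) {
            return new TableAdapter((OldTableConfig) legacy);
        } else if (legacy instanceof OldJaxbConfig) {
            return new JaxbAdapter((OldJaxbConfig) legacy);
        }
        return null; // unknown or absent configuration
    }

    public static void main(String[] args) {
        NewConfig c = adapt((OldTableConfig) () -> "sheet1");
        System.out.println(c.describe()); // table:sheet1
    }
}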
|
afb1efaa343a4341f9883741adeae77589210df6
|
kotlin
|
Refactored reference provider logic.--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/.idea/workspace.xml b/.idea/workspace.xml
index d0362e355ceec..99bc02b26e7aa 100644
--- a/.idea/workspace.xml
+++ b/.idea/workspace.xml
@@ -8,48 +8,12 @@
</component>
<component name="ChangeListManager">
<list default="true" id="f02ef53d-47a4-4f5e-a1aa-3df68aae9b20" name="Default" comment="">
- <change type="NEW" beforePath="" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/ExpressionTranslator.java" />
- <change type="NEW" beforePath="" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/ExpressionVisitor.java" />
- <change type="NEW" beforePath="" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/FunctionTranslator.java" />
- <change type="NEW" beforePath="" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/PatternTranslator.java" />
- <change type="NEW" beforePath="" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/WhenTranslator.java" />
- <change type="NEW" beforePath="" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/AbstractTranslator.java" />
- <change type="NEW" beforePath="" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/TranslationContext.java" />
- <change type="NEW" beforePath="" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/TranslatorVisitor.java" />
- <change type="NEW" beforePath="" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/utils/BindingUtils.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\AbstractTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/AbstractTranslator.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\BinaryOperationTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/operation/BinaryOperationTranslator.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\BindingUtils.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/utils/BindingUtils.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\ExpressionTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/ExpressionTranslator.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\ExpressionVisitor.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/ExpressionVisitor.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\FunctionTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/FunctionTranslator.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\GenerationState.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/GenerationState.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\Namer.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/utils/Namer.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\OperationTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/operation/OperationTranslator.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\OperatorTable.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/operation/OperatorTable.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\PatternTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/PatternTranslator.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\PropertyAccessTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/PropertyAccessTranslator.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\ReferenceProvider.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceProvider.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\ReferenceTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceTranslator.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\Translation.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/Translation.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\TranslationContext.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/TranslationContext.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\TranslationUtils.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/utils/TranslationUtils.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\TranslatorVisitor.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/TranslatorVisitor.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\UnaryOperationTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/operation/UnaryOperationTranslator.java" />
- <change type="MOVED" beforePath="C:\Dev\Projects\jet-contrib\k2js\translator\src\org\jetbrains\k2js\translate\WhenTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/WhenTranslator.java" />
<change type="MODIFICATION" beforePath="$PROJECT_DIR$/.idea/workspace.xml" afterPath="$PROJECT_DIR$/.idea/workspace.xml" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/K2JSTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/K2JSTranslator.java" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/declarations/ExtractionVisitor.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/declarations/ExtractionVisitor.java" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/ClassDeclarationTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/ClassDeclarationTranslator.java" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/ClassTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/ClassTranslator.java" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/DeclarationBodyVisitor.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/DeclarationBodyVisitor.java" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/NamespaceTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/NamespaceTranslator.java" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/PropertyTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/PropertyTranslator.java" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/initializer/AbstractInitializerTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/initializer/AbstractInitializerTranslator.java" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/initializer/ClassInitializerTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/initializer/ClassInitializerTranslator.java" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/initializer/InitializerVisitor.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/initializer/InitializerVisitor.java" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/initializer/NamespaceInitializerTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/initializer/NamespaceInitializerTranslator.java" />
- <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/utils/ClassSorter.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/utils/ClassSorter.java" />
+ <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/Translation.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/Translation.java" />
+ <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/PropertyAccessTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/PropertyAccessTranslator.java" />
+ <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceProvider.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceProvider.java" />
+ <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceTranslator.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceTranslator.java" />
+ <change type="MODIFICATION" beforePath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/utils/TranslationUtils.java" afterPath="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/utils/TranslationUtils.java" />
</list>
<ignored path="k2js.iws" />
<ignored path=".idea/workspace.xml" />
@@ -160,7 +124,7 @@
<favorites_list name="k2js" />
</component>
<component name="FileEditorManager">
- <splitter split-orientation="horizontal" split-proportion="0.3907068">
+ <splitter split-orientation="horizontal" split-proportion="0.5281414">
<split-first>
<leaf>
<file leaf-file-name="UnaryOperationTranslator.java" pinned="false" current="false" current-in-tab="false">
@@ -176,7 +140,9 @@
<entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/operation/OperationTranslator.java">
<provider selected="true" editor-type-id="text-editor">
<state line="49" column="0" selection-start="1957" selection-end="1957" vertical-scroll-proportion="0.0">
- <folding />
+ <folding>
+ <element signature="imports" expanded="false" />
+ </folding>
</state>
</provider>
</entry>
@@ -220,16 +186,34 @@
<file leaf-file-name="ReferenceProvider.java" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceProvider.java">
<provider selected="true" editor-type-id="text-editor">
- <state line="13" column="19" selection-start="468" selection-end="468" vertical-scroll-proportion="0.0">
+ <state line="41" column="59" selection-start="1836" selection-end="1836" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
</file>
- <file leaf-file-name="ReferenceTranslator.java" pinned="false" current="true" current-in-tab="true">
+ <file leaf-file-name="ReferenceTranslator.java" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceTranslator.java">
<provider selected="true" editor-type-id="text-editor">
- <state line="30" column="11" selection-start="1117" selection-end="1117" vertical-scroll-proportion="0.30357143">
+ <state line="68" column="83" selection-start="2716" selection-end="2716" vertical-scroll-proportion="0.0">
+ <folding />
+ </state>
+ </provider>
+ </entry>
+ </file>
+ <file leaf-file-name="PropertyAccessTranslator.java" pinned="false" current="true" current-in-tab="true">
+ <entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/PropertyAccessTranslator.java">
+ <provider selected="true" editor-type-id="text-editor">
+ <state line="120" column="100" selection-start="5315" selection-end="5315" vertical-scroll-proportion="0.23214285">
+ <folding />
+ </state>
+ </provider>
+ </entry>
+ </file>
+ <file leaf-file-name="TraitTest.java" pinned="false" current="false" current-in-tab="false">
+ <entry file="file://$PROJECT_DIR$/translator/test/org/jetbrains/k2js/test/TraitTest.java">
+ <provider selected="true" editor-type-id="text-editor">
+ <state line="58" column="60" selection-start="1296" selection-end="1296" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
@@ -251,10 +235,8 @@
<file leaf-file-name="TranslationUtils.java" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/utils/TranslationUtils.java">
<provider selected="true" editor-type-id="text-editor">
- <state line="105" column="5" selection-start="4607" selection-end="4607" vertical-scroll-proportion="0.0">
- <folding>
- <element signature="imports" expanded="false" />
- </folding>
+ <state line="36" column="0" selection-start="1468" selection-end="1468" vertical-scroll-proportion="0.0">
+ <folding />
</state>
</provider>
</entry>
@@ -275,7 +257,9 @@
<entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/DeclarationBodyVisitor.java">
<provider selected="true" editor-type-id="text-editor">
<state line="55" column="0" selection-start="2216" selection-end="2216" vertical-scroll-proportion="0.0">
- <folding />
+ <folding>
+ <element signature="imports" expanded="false" />
+ </folding>
</state>
</provider>
</entry>
@@ -307,19 +291,19 @@
</provider>
</entry>
</file>
- <file leaf-file-name="Translation.java" pinned="false" current="false" current-in-tab="false">
+ <file leaf-file-name="Translation.java" pinned="false" current="false" current-in-tab="true">
<entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/Translation.java">
<provider selected="true" editor-type-id="text-editor">
- <state line="36" column="56" selection-start="1761" selection-end="1761" vertical-scroll-proportion="0.0">
+ <state line="97" column="0" selection-start="4431" selection-end="4431" vertical-scroll-proportion="1.1111112">
<folding />
</state>
</provider>
</entry>
</file>
- <file leaf-file-name="ExpressionVisitor.java" pinned="false" current="false" current-in-tab="true">
+ <file leaf-file-name="ExpressionVisitor.java" pinned="false" current="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/ExpressionVisitor.java">
<provider selected="true" editor-type-id="text-editor">
- <state line="159" column="66" selection-start="7163" selection-end="7163" vertical-scroll-proportion="0.3023622">
+ <state line="159" column="66" selection-start="7163" selection-end="7163" vertical-scroll-proportion="-7.3846154">
<folding />
</state>
</provider>
@@ -355,10 +339,6 @@
<component name="IdeDocumentHistory">
<option name="changedFiles">
<list>
- <option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/ReferenceTranslator.java" />
- <option value="$PROJECT_DIR$/translator/test/org/jetbrains/k2js/test/BasicClassTest.java" />
- <option value="$PROJECT_DIR$/translator/testFiles/class/out/propertiesAsParametersInitialized.js" />
- <option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/initializer/ClassInitializerTranslator.java" />
<option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/TranslationUtils.java" />
<option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/TranslationContext.java" />
<option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/Translation.java" />
@@ -370,7 +350,11 @@
<option value="$PROJECT_DIR$/translator/testFiles/operatorOverloading/cases/usingModInCaseModAssignNotAvailable.kt" />
<option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/OperatorTable.java" />
<option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/operation/OperatorTable.java" />
+ <option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceProvider.java" />
+ <option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/utils/TranslationUtils.java" />
<option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceTranslator.java" />
+ <option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/Translation.java" />
+ <option value="$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/PropertyAccessTranslator.java" />
</list>
</option>
</component>
@@ -985,7 +969,7 @@
</component>
<component name="ToolWindowManager">
<frame x="-8" y="-8" width="1936" height="1176" extended-state="6" />
- <editor active="false" />
+ <editor active="true" />
<layout>
<window_info id="Changes" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.32853025" sideWeight="0.49812934" order="7" side_tool="false" content_ui="tabs" />
<window_info id="Palette" active="false" anchor="right" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.33" sideWeight="0.5" order="3" side_tool="false" content_ui="tabs" />
@@ -1002,8 +986,8 @@
<window_info id="Structure" active="false" anchor="left" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.25" sideWeight="0.5" order="1" side_tool="true" content_ui="tabs" />
<window_info id="Maven Projects" active="false" anchor="right" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.32977018" sideWeight="0.728146" order="4" side_tool="false" content_ui="tabs" />
<window_info id="Commander" active="false" anchor="right" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.3997862" sideWeight="0.6705998" order="0" side_tool="false" content_ui="tabs" />
- <window_info id="Project" active="true" anchor="left" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="true" weight="0.18332443" sideWeight="0.728146" order="0" side_tool="false" content_ui="tabs" />
- <window_info id="Run" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="true" weight="0.27185398" sideWeight="0.64190274" order="2" side_tool="false" content_ui="tabs" />
+ <window_info id="Project" active="false" anchor="left" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="true" weight="0.18332443" sideWeight="0.728146" order="0" side_tool="false" content_ui="tabs" />
+ <window_info id="Run" active="true" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="true" weight="0.27185398" sideWeight="0.64190274" order="2" side_tool="false" content_ui="tabs" />
<window_info id="Cvs" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.25" sideWeight="0.5" order="4" side_tool="false" content_ui="tabs" />
<window_info id="Message" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.33" sideWeight="0.5" order="0" side_tool="false" content_ui="tabs" />
<window_info id="Metrics" active="false" anchor="bottom" auto_hide="false" internal_type="DOCKED" type="DOCKED" visible="false" weight="0.3275862" sideWeight="0.5" order="9" side_tool="false" content_ui="tabs" />
@@ -1047,7 +1031,7 @@
<option name="INCLUDE_TEXT_INTO_SHELF" value="false" />
<option name="CREATE_PATCH_EXPAND_DETAILS_DEFAULT" value="true" />
<option name="FORCE_NON_EMPTY_COMMENT" value="false" />
- <option name="LAST_COMMIT_MESSAGE" value="Added test." />
+ <option name="LAST_COMMIT_MESSAGE" value="Reorganised project structure." />
<option name="MAKE_NEW_CHANGELIST_ACTIVE" value="true" />
<option name="OPTIMIZE_IMPORTS_BEFORE_PROJECT_COMMIT" value="true" />
<option name="CHECK_FILES_UP_TO_DATE_BEFORE_COMMIT" value="false" />
@@ -1060,7 +1044,6 @@
<option name="UPDATE_GROUP_BY_CHANGELIST" value="false" />
<option name="SHOW_FILE_HISTORY_AS_TREE" value="false" />
<option name="FILE_HISTORY_SPLITTER_PROPORTION" value="0.6" />
- <MESSAGE value="Rewrote name resolving logic. Improved DeclarationExtractor and renamed to Declarations." />
<MESSAGE value="Added support for basic case of function literal." />
<MESSAGE value="Added tests for simple closure examples." />
<MESSAGE value="Added some tests from jet project." />
@@ -1085,6 +1068,7 @@
<MESSAGE value="Added support for initialization of properties as constructor parameters." />
<MESSAGE value="Refactoring function translator." />
<MESSAGE value="Added test." />
+ <MESSAGE value="Reorganised project structure." />
</component>
<component name="XDebuggerManager">
<breakpoint-manager />
@@ -1094,29 +1078,6 @@
<option name="FILTER_TARGETS" value="false" />
</component>
<component name="editorHistoryManager">
- <entry file="file://$PROJECT_DIR$/translator/testFiles/inheritance/out/valuePassedToAncestorConstructor.js">
- <provider selected="true" editor-type-id="text-editor">
- <state line="0" column="0" selection-start="0" selection-end="0" vertical-scroll-proportion="0.0">
- <folding />
- </state>
- </provider>
- </entry>
- <entry file="file://$PROJECT_DIR$/translator/testFiles/class/out/propertiesAsParametersInitialized.js">
- <provider selected="true" editor-type-id="text-editor">
- <state line="3" column="102" selection-start="97" selection-end="97" vertical-scroll-proportion="0.0">
- <folding />
- </state>
- </provider>
- </entry>
- <entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/utils/TranslationUtils.java">
- <provider selected="true" editor-type-id="text-editor">
- <state line="105" column="5" selection-start="4607" selection-end="4607" vertical-scroll-proportion="0.0">
- <folding>
- <element signature="imports" expanded="false" />
- </folding>
- </state>
- </provider>
- </entry>
<entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/TranslationContext.java">
<provider selected="true" editor-type-id="text-editor">
<state line="120" column="29" selection-start="4917" selection-end="4917" vertical-scroll-proportion="0.0">
@@ -1131,13 +1092,6 @@
</state>
</provider>
</entry>
- <entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/Translation.java">
- <provider selected="true" editor-type-id="text-editor">
- <state line="36" column="56" selection-start="1761" selection-end="1761" vertical-scroll-proportion="0.0">
- <folding />
- </state>
- </provider>
- </entry>
<entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/FunctionTranslator.java">
<provider selected="true" editor-type-id="text-editor">
<state line="70" column="93" selection-start="2680" selection-end="2680" vertical-scroll-proportion="0.0">
@@ -1151,7 +1105,9 @@
<entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/declaration/DeclarationBodyVisitor.java">
<provider selected="true" editor-type-id="text-editor">
<state line="55" column="0" selection-start="2216" selection-end="2216" vertical-scroll-proportion="0.0">
- <folding />
+ <folding>
+ <element signature="imports" expanded="false" />
+ </folding>
</state>
</provider>
</entry>
@@ -1183,30 +1139,58 @@
</state>
</provider>
</entry>
- <entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceProvider.java">
+ <entry file="file://$PROJECT_DIR$/translator/testFiles/operatorOverloading/cases/usingModInCaseModAssignNotAvailable.kt">
<provider selected="true" editor-type-id="text-editor">
- <state line="13" column="19" selection-start="468" selection-end="468" vertical-scroll-proportion="0.0">
+ <state line="13" column="30" selection-start="201" selection-end="201" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
- <entry file="file://$PROJECT_DIR$/translator/testFiles/operatorOverloading/cases/usingModInCaseModAssignNotAvailable.kt">
+ <entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/ExpressionVisitor.java">
<provider selected="true" editor-type-id="text-editor">
- <state line="13" column="30" selection-start="201" selection-end="201" vertical-scroll-proportion="0.0">
+ <state line="159" column="66" selection-start="7163" selection-end="7163" vertical-scroll-proportion="-7.3846154">
<folding />
</state>
</provider>
</entry>
- <entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/expression/ExpressionVisitor.java">
+ <entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceProvider.java">
<provider selected="true" editor-type-id="text-editor">
- <state line="159" column="66" selection-start="7163" selection-end="7163" vertical-scroll-proportion="0.3023622">
+ <state line="41" column="59" selection-start="1836" selection-end="1836" vertical-scroll-proportion="0.0">
<folding />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/ReferenceTranslator.java">
<provider selected="true" editor-type-id="text-editor">
- <state line="30" column="11" selection-start="1117" selection-end="1117" vertical-scroll-proportion="0.30357143">
+ <state line="68" column="83" selection-start="2716" selection-end="2716" vertical-scroll-proportion="0.0">
+ <folding />
+ </state>
+ </provider>
+ </entry>
+ <entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/utils/TranslationUtils.java">
+ <provider selected="true" editor-type-id="text-editor">
+ <state line="36" column="0" selection-start="1468" selection-end="1468" vertical-scroll-proportion="0.0">
+ <folding />
+ </state>
+ </provider>
+ </entry>
+ <entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/general/Translation.java">
+ <provider selected="true" editor-type-id="text-editor">
+ <state line="97" column="0" selection-start="4431" selection-end="4431" vertical-scroll-proportion="1.1111112">
+ <folding />
+ </state>
+ </provider>
+ </entry>
+ <entry file="file://$PROJECT_DIR$/translator/test/org/jetbrains/k2js/test/TraitTest.java">
+ <provider selected="true" editor-type-id="text-editor">
+ <state line="58" column="60" selection-start="1296" selection-end="1296" vertical-scroll-proportion="0.0">
+ <folding />
+ </state>
+ </provider>
+ </entry>
+ <entry file="file://$PROJECT_DIR$/translator/src/org/jetbrains/k2js/translate/reference/PropertyAccessTranslator.java">
+ <provider selected="true" editor-type-id="text-editor">
+ <state line="120" column="100" selection-start="5315" selection-end="5315" vertical-scroll-proportion="0.23214285">
<folding />
</state>
</provider>
diff --git a/translator/src/org/jetbrains/k2js/translate/general/Translation.java b/translator/src/org/jetbrains/k2js/translate/general/Translation.java
index 9e25f9cb70f12..63f32d507de43 100644
--- a/translator/src/org/jetbrains/k2js/translate/general/Translation.java
+++ b/translator/src/org/jetbrains/k2js/translate/general/Translation.java
@@ -15,7 +15,6 @@
import org.jetbrains.k2js.translate.initializer.ClassInitializerTranslator;
import org.jetbrains.k2js.translate.initializer.NamespaceInitializerTranslator;
import org.jetbrains.k2js.translate.reference.PropertyAccessTranslator;
-import org.jetbrains.k2js.translate.reference.ReferenceProvider;
import org.jetbrains.k2js.translate.reference.ReferenceTranslator;
/**
@@ -96,13 +95,6 @@ static public JsPropertyInitializer generateNamespaceInitializerMethod(@NotNull
return (new NamespaceInitializerTranslator(namespace, context)).generateInitializeMethod();
}
- @NotNull
- static public JsNameRef generateCorrectReference(@NotNull TranslationContext context,
- @NotNull JetSimpleNameExpression expression,
- @NotNull JsName referencedName) {
- return (new ReferenceProvider(context, expression, referencedName)).generateCorrectReference();
- }
-
public static void generateAst(@NotNull JsProgram result, @NotNull BindingContext bindingContext,
@NotNull Declarations declarations, @NotNull JetNamespace namespace) {
JsBlock block = result.getFragmentBlock(0);
diff --git a/translator/src/org/jetbrains/k2js/translate/reference/PropertyAccessTranslator.java b/translator/src/org/jetbrains/k2js/translate/reference/PropertyAccessTranslator.java
index f1d4d5123de62..44d6f941d0ae3 100644
--- a/translator/src/org/jetbrains/k2js/translate/reference/PropertyAccessTranslator.java
+++ b/translator/src/org/jetbrains/k2js/translate/reference/PropertyAccessTranslator.java
@@ -20,7 +20,6 @@
import org.jetbrains.k2js.translate.general.Translation;
import org.jetbrains.k2js.translate.general.TranslationContext;
import org.jetbrains.k2js.translate.utils.BindingUtils;
-import org.jetbrains.k2js.translate.utils.TranslationUtils;
/**
* @author Talanov Pavel
@@ -94,7 +93,7 @@ private JsInvocation resolveAsPropertyGet(@NotNull JetQualifiedExpression expres
@NotNull
private JsInvocation resolveAsPropertyGet(@NotNull JetSimpleNameExpression expression) {
JsName getterName = getNotNullGetterName(expression);
- JsNameRef getterReference = TranslationUtils.getReference(context(), expression, getterName);
+ JsNameRef getterReference = ReferenceProvider.getReference(getterName, context(), expression);
return AstUtil.newInvocation(getterReference);
}
@@ -118,7 +117,7 @@ private JsInvocation resolveAsPropertySet(@NotNull JetDotQualifiedExpression dot
@NotNull
private JsInvocation resolveAsPropertySet(@NotNull JetSimpleNameExpression expression) {
JsName setterName = getNotNullSetterName(expression);
- JsNameRef setterReference = Translation.generateCorrectReference(context(), expression, setterName);
+ JsNameRef setterReference = ReferenceProvider.getReference(setterName, context(), expression);
return AstUtil.newInvocation(setterReference);
}
diff --git a/translator/src/org/jetbrains/k2js/translate/reference/ReferenceProvider.java b/translator/src/org/jetbrains/k2js/translate/reference/ReferenceProvider.java
index 1262f7f2ad8be..7e4e57f1676fa 100644
--- a/translator/src/org/jetbrains/k2js/translate/reference/ReferenceProvider.java
+++ b/translator/src/org/jetbrains/k2js/translate/reference/ReferenceProvider.java
@@ -17,16 +17,29 @@ public final class ReferenceProvider {
private final TranslationContext context;
@NotNull
private final JsName referencedName;
+ private boolean isBackingFieldAccess;
private boolean requiresThisQualifier;
private boolean requiresNamespaceQualifier;
+ public static JsNameRef getReference(@NotNull JsName referencedName, @NotNull TranslationContext context,
+ boolean isBackingFieldAccess) {
+ return (new ReferenceProvider(referencedName, context, isBackingFieldAccess)).generateCorrectReference();
+ }
+
+
+ public static JsNameRef getReference(@NotNull JsName referencedName, @NotNull TranslationContext context,
+ JetSimpleNameExpression expression) {
+ boolean isBackingFieldAccess = expression.getReferencedNameElementType() == JetTokens.FIELD_IDENTIFIER;
+ return (new ReferenceProvider(referencedName, context, isBackingFieldAccess))
+ .generateCorrectReference();
+ }
- public ReferenceProvider(@NotNull TranslationContext context,
- @NotNull JetSimpleNameExpression expression,
- @NotNull JsName referencedName) {
+ private ReferenceProvider(@NotNull JsName referencedName, @NotNull TranslationContext context,
+ boolean isBackingFieldAccess) {
this.context = context;
this.referencedName = referencedName;
- this.requiresThisQualifier = requiresThisQualifier(expression);
+ this.isBackingFieldAccess = isBackingFieldAccess;
+ this.requiresThisQualifier = requiresThisQualifier();
this.requiresNamespaceQualifier = requiresNamespaceQualifier();
}
@@ -44,10 +57,9 @@ private boolean requiresNamespaceQualifier() {
return context.namespaceScope().ownsName(referencedName);
}
- private boolean requiresThisQualifier(@NotNull JetSimpleNameExpression expression) {
+ private boolean requiresThisQualifier() {
JsName name = context.enclosingScope().findExistingName(referencedName.getIdent());
boolean isClassMember = context.classScope().ownsName(name);
- boolean isBackingFieldAccess = expression.getReferencedNameElementType() == JetTokens.FIELD_IDENTIFIER;
return isClassMember || isBackingFieldAccess;
}
}
diff --git a/translator/src/org/jetbrains/k2js/translate/reference/ReferenceTranslator.java b/translator/src/org/jetbrains/k2js/translate/reference/ReferenceTranslator.java
index e474a95066550..9dd63d6753247 100644
--- a/translator/src/org/jetbrains/k2js/translate/reference/ReferenceTranslator.java
+++ b/translator/src/org/jetbrains/k2js/translate/reference/ReferenceTranslator.java
@@ -66,7 +66,7 @@ private JsExpression resolveAsGlobalReference(@NotNull JetSimpleNameExpression e
return null;
}
JsName referencedName = context().getNameForDescriptor(referencedDescriptor);
- return TranslationUtils.getReference(context(), expression, referencedName);
+ return ReferenceProvider.getReference(referencedName, context(), expression);
}
@Nullable
diff --git a/translator/src/org/jetbrains/k2js/translate/utils/TranslationUtils.java b/translator/src/org/jetbrains/k2js/translate/utils/TranslationUtils.java
index 2b48825fb9add..26ec095bc35e5 100644
--- a/translator/src/org/jetbrains/k2js/translate/utils/TranslationUtils.java
+++ b/translator/src/org/jetbrains/k2js/translate/utils/TranslationUtils.java
@@ -7,7 +7,6 @@
import org.jetbrains.jet.lang.descriptors.PropertyDescriptor;
import org.jetbrains.jet.lang.psi.JetExpression;
import org.jetbrains.jet.lang.psi.JetProperty;
-import org.jetbrains.jet.lang.psi.JetSimpleNameExpression;
import org.jetbrains.jet.lang.psi.ValueArgument;
import org.jetbrains.k2js.translate.general.Translation;
import org.jetbrains.k2js.translate.general.TranslationContext;
@@ -35,14 +34,6 @@ static public JsBinaryOperation isNullCheck(@NotNull TranslationContext context,
return AstUtil.equals(expressionToCheck, nullLiteral);
}
- @NotNull
- static public JsNameRef getReference(@NotNull TranslationContext context,
- @NotNull JetSimpleNameExpression expression,
- @NotNull JsName referencedName) {
- return (new ReferenceProvider(context, expression, referencedName)).generateCorrectReference();
- }
-
-
@Nullable
static public JsName getLocalReferencedName(@NotNull TranslationContext context,
@NotNull String name) {
@@ -69,25 +60,24 @@ static public JsExpression translateArgument(@NotNull TranslationContext context
return Translation.translateAsExpression(jetExpression, context);
}
- //TODO: refactor
@NotNull
static public JsNameRef backingFieldReference(@NotNull TranslationContext context,
@NotNull JetProperty expression) {
JsName backingFieldName = getBackingFieldName(getPropertyName(expression), context);
- if (BindingUtils.belongsToNamespace(context.bindingContext(), expression)) {
- return context.getNamespaceQualifiedReference(backingFieldName);
- }
- return AstUtil.thisQualifiedReference(backingFieldName);
+ return generateReference(context, backingFieldName);
}
@NotNull
static public JsNameRef backingFieldReference(@NotNull TranslationContext context,
@NotNull PropertyDescriptor descriptor) {
JsName backingFieldName = getBackingFieldName(descriptor.getName(), context);
- if (BindingUtils.belongsToNamespace(context.bindingContext(), descriptor)) {
- return context.getNamespaceQualifiedReference(backingFieldName);
- }
- return AstUtil.thisQualifiedReference(backingFieldName);
+ return generateReference(context, backingFieldName);
+ }
+
+ @NotNull
+ private static JsNameRef generateReference(@NotNull TranslationContext context,
+ @NotNull JsName backingFieldName) {
+ return ReferenceProvider.getReference(backingFieldName, context, true);
}
@NotNull
@@ -100,7 +90,8 @@ static public String getPropertyName(@NotNull JetProperty expression) {
}
@NotNull
- static private JsName getBackingFieldName(@NotNull String propertyName, @NotNull TranslationContext context) {
+ static private JsName getBackingFieldName(@NotNull String propertyName,
+ @NotNull TranslationContext context) {
String backingFieldName = Namer.getKotlinBackingFieldName(propertyName);
return context.enclosingScope().findExistingName(backingFieldName);
}
|
5122c892b23298f9faa681aa76d0401f7f5ee236
|
orientdb
|
Fixed issue 156 about deep inheritance
|
c
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/graph/ODatabaseGraphTx.java b/core/src/main/java/com/orientechnologies/orient/core/db/graph/ODatabaseGraphTx.java
index 674d49436ba..b0aa582e847 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/db/graph/ODatabaseGraphTx.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/graph/ODatabaseGraphTx.java
@@ -148,12 +148,17 @@ public OGraphElement newInstance(final String iClassName) {
else if (iClassName.equals(OGraphEdge.class.getSimpleName()))
return new OGraphEdge(this);
- final OClass cls = getMetadata().getSchema().getClass(iClassName);
- if (cls != null && cls.getSuperClass() != null) {
- if (cls.getSuperClass().getName().equals(OGraphVertex.class.getSimpleName()))
- return new OGraphVertex(this, iClassName);
- else if (cls.getSuperClass().getName().equals(OGraphEdge.class.getSimpleName()))
- return new OGraphEdge(this, iClassName);
+ OClass cls = getMetadata().getSchema().getClass(iClassName);
+ if (cls != null) {
+ cls = cls.getSuperClass();
+ while (cls != null) {
+ if (cls.getName().equals(OGraphVertex.class.getSimpleName()))
+ return new OGraphVertex(this, iClassName);
+ else if (cls.getName().equals(OGraphEdge.class.getSimpleName()))
+ return new OGraphEdge(this, iClassName);
+
+ cls = cls.getSuperClass();
+ }
}
throw new OGraphException("Unrecognized class: " + iClassName);
|
ffd554b271debbdb29ac9f130ca6797f4859a1f9
|
hadoop
|
YARN-1734. Fixed ResourceManager to update the configurations when it transits from standby to active mode so as to assimilate any changes that happened while it was in standby mode. Contributed by Xuan Gong. svn merge --ignore-ancestry -c 1571539 ../../trunk/ git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1571540 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 7d48a4fe12ac0..e34dbbf3c04a7 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -333,6 +333,10 @@ Release 2.4.0 - UNRELEASED
re-registration after a RESYNC and thus avoid hanging. (Rohith Sharma via
vinodkv)
+ YARN-1734. Fixed ResourceManager to update the configurations when it
+ transits from standby to active mode so as to assimilate any changes that
+ happened while it was in standby mode. (Xuan Gong via vinodkv)
+
Release 2.3.1 - UNRELEASED
INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
index 70845c775e481..c53d40f54a1db 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
@@ -250,10 +250,20 @@ public synchronized void monitorHealth()
@Override
public synchronized void transitionToActive(
HAServiceProtocol.StateChangeRequestInfo reqInfo) throws IOException {
+ // call refreshAdminAcls before HA state transition
+ // for the case that adminAcls have been updated in previous active RM
+ try {
+ refreshAdminAcls(false);
+ } catch (YarnException ex) {
+ throw new ServiceFailedException("Can not execute refreshAdminAcls", ex);
+ }
+
UserGroupInformation user = checkAccess("transitionToActive");
checkHaStateChange(reqInfo);
try {
rm.transitionToActive();
+ // call all refresh*s for active RM to get the updated configurations.
+ refreshAll();
RMAuditLogger.logSuccess(user.getShortUserName(),
"transitionToActive", "RMHAProtocolService");
} catch (Exception e) {
@@ -268,6 +278,13 @@ public synchronized void transitionToActive(
@Override
public synchronized void transitionToStandby(
HAServiceProtocol.StateChangeRequestInfo reqInfo) throws IOException {
+ // call refreshAdminAcls before HA state transition
+ // for the case that adminAcls have been updated in previous active RM
+ try {
+ refreshAdminAcls(false);
+ } catch (YarnException ex) {
+ throw new ServiceFailedException("Can not execute refreshAdminAcls", ex);
+ }
UserGroupInformation user = checkAccess("transitionToStandby");
checkHaStateChange(reqInfo);
try {
@@ -406,10 +423,15 @@ public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings(
@Override
public RefreshAdminAclsResponse refreshAdminAcls(
RefreshAdminAclsRequest request) throws YarnException, IOException {
+ return refreshAdminAcls(true);
+ }
+
+ private RefreshAdminAclsResponse refreshAdminAcls(boolean checkRMHAState)
+ throws YarnException, IOException {
String argName = "refreshAdminAcls";
UserGroupInformation user = checkAcls(argName);
-
- if (!isRMActive()) {
+
+ if (checkRMHAState && !isRMActive()) {
RMAuditLogger.logFailure(user.getShortUserName(), argName,
adminAcl.toString(), "AdminService",
"ResourceManager is not active. Can not refresh user-groups.");
@@ -521,6 +543,24 @@ private synchronized Configuration getConfiguration(Configuration conf,
return conf;
}
+ private void refreshAll() throws ServiceFailedException {
+ try {
+ refreshQueues(RefreshQueuesRequest.newInstance());
+ refreshNodes(RefreshNodesRequest.newInstance());
+ refreshSuperUserGroupsConfiguration(
+ RefreshSuperUserGroupsConfigurationRequest.newInstance());
+ refreshUserToGroupsMappings(
+ RefreshUserToGroupsMappingsRequest.newInstance());
+ if (getConfig().getBoolean(
+ CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
+ false)) {
+ refreshServiceAcls(RefreshServiceAclsRequest.newInstance());
+ }
+ } catch (Exception ex) {
+ throw new ServiceFailedException(ex.getMessage());
+ }
+ }
+
@VisibleForTesting
public AccessControlList getAccessControlList() {
return this.adminAcl;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java
index e67b81f36f27c..60259cddbd558 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java
@@ -34,12 +34,16 @@
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.ha.HAServiceProtocol;
+import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
+import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo;
import org.apache.hadoop.security.GroupMappingServiceProvider;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
+import org.apache.hadoop.yarn.conf.HAUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshAdminAclsRequest;
@@ -518,6 +522,94 @@ public void testRefreshNodesWithFileSystemBasedConfigurationProvider()
Assert.assertTrue(excludeHosts.contains("0.0.0.0:123"));
}
+ @Test
+ public void testRMHAWithFileSystemBasedConfiguration() throws IOException,
+ YarnException {
+ StateChangeRequestInfo requestInfo = new StateChangeRequestInfo(
+ HAServiceProtocol.RequestSource.REQUEST_BY_USER);
+ configuration.set(YarnConfiguration.RM_CONFIGURATION_PROVIDER_CLASS,
+ "org.apache.hadoop.yarn.FileSystemBasedConfigurationProvider");
+ configuration.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
+ configuration.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
+ configuration.set(YarnConfiguration.RM_HA_IDS, "rm1,rm2");
+ int base = 100;
+ for (String confKey : YarnConfiguration
+ .getServiceAddressConfKeys(configuration)) {
+ configuration.set(HAUtil.addSuffix(confKey, "rm1"), "0.0.0.0:"
+ + (base + 20));
+ configuration.set(HAUtil.addSuffix(confKey, "rm2"), "0.0.0.0:"
+ + (base + 40));
+ base = base * 2;
+ }
+ Configuration conf1 = new Configuration(configuration);
+ conf1.set(YarnConfiguration.RM_HA_ID, "rm1");
+ Configuration conf2 = new Configuration(configuration);
+ conf2.set(YarnConfiguration.RM_HA_ID, "rm2");
+
+ // upload default configurations
+ uploadDefaultConfiguration();
+
+ MockRM rm1 = null;
+ MockRM rm2 = null;
+ try {
+ rm1 = new MockRM(conf1);
+ rm1.init(conf1);
+ rm1.start();
+ Assert.assertTrue(rm1.getRMContext().getHAServiceState()
+ == HAServiceState.STANDBY);
+
+ rm2 = new MockRM(conf2);
+ rm2.init(conf1);
+ rm2.start();
+ Assert.assertTrue(rm2.getRMContext().getHAServiceState()
+ == HAServiceState.STANDBY);
+
+ rm1.adminService.transitionToActive(requestInfo);
+ Assert.assertTrue(rm1.getRMContext().getHAServiceState()
+ == HAServiceState.ACTIVE);
+
+ CapacitySchedulerConfiguration csConf =
+ new CapacitySchedulerConfiguration();
+ csConf.set("yarn.scheduler.capacity.maximum-applications", "5000");
+ uploadConfiguration(csConf, "capacity-scheduler.xml");
+
+ rm1.adminService.refreshQueues(RefreshQueuesRequest.newInstance());
+
+ int maxApps =
+ ((CapacityScheduler) rm1.getRMContext().getScheduler())
+ .getConfiguration().getMaximumSystemApplications();
+ Assert.assertEquals(maxApps, 5000);
+
+ // Before failover happens, the maxApps is
+ // still the default value on the standby rm : rm2
+ int maxAppsBeforeFailOver =
+ ((CapacityScheduler) rm2.getRMContext().getScheduler())
+ .getConfiguration().getMaximumSystemApplications();
+ Assert.assertEquals(maxAppsBeforeFailOver, 10000);
+
+ // Do the failover
+ rm1.adminService.transitionToStandby(requestInfo);
+ rm2.adminService.transitionToActive(requestInfo);
+ Assert.assertTrue(rm1.getRMContext().getHAServiceState()
+ == HAServiceState.STANDBY);
+ Assert.assertTrue(rm2.getRMContext().getHAServiceState()
+ == HAServiceState.ACTIVE);
+
+ int maxAppsAfter =
+ ((CapacityScheduler) rm2.getRMContext().getScheduler())
+ .getConfiguration().getMaximumSystemApplications();
+
+ Assert.assertEquals(maxAppsAfter, 5000);
+ } finally {
+ if (rm1 != null) {
+ rm1.stop();
+ }
+ if (rm2 != null) {
+ rm2.stop();
+ }
+ }
+ }
+
private String writeConfigurationXML(Configuration conf, String confXMLName)
throws IOException {
DataOutputStream output = null;
|
469ae561947ace9a87d971875fbf454a499b542c
|
intellij-community
|
add suggestions for literal expressions (IDEA-57593)
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/java/java-impl/src/com/intellij/psi/impl/source/codeStyle/JavaCodeStyleManagerImpl.java b/java/java-impl/src/com/intellij/psi/impl/source/codeStyle/JavaCodeStyleManagerImpl.java
index 58932d94d7dba..e228200bef877 100644
--- a/java/java-impl/src/com/intellij/psi/impl/source/codeStyle/JavaCodeStyleManagerImpl.java
+++ b/java/java-impl/src/com/intellij/psi/impl/source/codeStyle/JavaCodeStyleManagerImpl.java
@@ -644,6 +644,11 @@ else if (expr instanceof PsiLiteralExpression && variableKind == VariableKind.ST
return suggestVariableNameByExpressionOnly(((PsiParenthesizedExpression)expr).getExpression(), variableKind);
} else if (expr instanceof PsiTypeCastExpression) {
return suggestVariableNameByExpressionOnly(((PsiTypeCastExpression)expr).getOperand(), variableKind);
+ } else if (expr instanceof PsiLiteralExpression) {
+ final String text = StringUtil.stripQuotesAroundValue(expr.getText());
+ if (isIdentifier(text)) {
+ return new NamesByExprInfo(text, getSuggestionsByName(text, variableKind, false));
+ }
}
return new NamesByExprInfo(null, ArrayUtil.EMPTY_STRING_ARRAY);
|
7db30f8428ef341cc39b2758d3bd6dcccc25b080
|
hadoop
|
MAPREDUCE-3345. Fixed a race condition in ResourceManager that was causing TestContainerManagerSecurity to fail sometimes. Contributed by Hitesh Shah. svn merge -c r1199144 --ignore-ancestry ../../trunk/ git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1199145 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 43cb6b7dd248c..e1034c880ef65 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -39,6 +39,9 @@ Release 0.23.1 - Unreleased
MAPREDUCE-3342. Fixed JobHistoryServer to also show the job's queue
name. (Jonathan Eagles via vinodkv)
+ MAPREDUCE-3345. Fixed a race condition in ResourceManager that was causing
+ TestContainerManagerSecurity to fail sometimes. (Hitesh Shah via vinodkv)
+
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
index 0d81f80121213..71dd982b607af 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
@@ -595,8 +595,13 @@ public void transition(RMAppAttemptImpl appAttempt,
AM_CONTAINER_PRIORITY, "*", appAttempt.submissionContext
.getAMContainerSpec().getResource(), 1);
- appAttempt.scheduler.allocate(appAttempt.applicationAttemptId,
- Collections.singletonList(request), EMPTY_CONTAINER_RELEASE_LIST);
+ Allocation amContainerAllocation =
+ appAttempt.scheduler.allocate(appAttempt.applicationAttemptId,
+ Collections.singletonList(request), EMPTY_CONTAINER_RELEASE_LIST);
+ if (amContainerAllocation != null
+ && amContainerAllocation.getContainers() != null) {
+ assert(amContainerAllocation.getContainers().size() == 0);
+ }
}
}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java
index c61c7ab89f0ec..977150520a195 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java
@@ -236,28 +236,30 @@ public Allocation allocate(
RMContainerEventType.RELEASED);
}
- if (!ask.isEmpty()) {
- LOG.debug("allocate: pre-update" +
- " applicationId=" + applicationAttemptId +
- " application=" + application);
- application.showRequests();
-
- // Update application requests
- application.updateResourceRequests(ask);
-
- LOG.debug("allocate: post-update" +
- " applicationId=" + applicationAttemptId +
- " application=" + application);
- application.showRequests();
+ synchronized (application) {
+ if (!ask.isEmpty()) {
+ LOG.debug("allocate: pre-update" +
+ " applicationId=" + applicationAttemptId +
+ " application=" + application);
+ application.showRequests();
+
+ // Update application requests
+ application.updateResourceRequests(ask);
+
+ LOG.debug("allocate: post-update" +
+ " applicationId=" + applicationAttemptId +
+ " application=" + application);
+ application.showRequests();
+
+ LOG.debug("allocate:" +
+ " applicationId=" + applicationAttemptId +
+ " #ask=" + ask.size());
+ }
- LOG.debug("allocate:" +
- " applicationId=" + applicationAttemptId +
- " #ask=" + ask.size());
+ return new Allocation(
+ application.pullNewlyAllocatedContainers(),
+ application.getHeadroom());
}
-
- return new Allocation(
- application.pullNewlyAllocatedContainers(),
- application.getHeadroom());
}
private SchedulerApp getApplication(
|
e7371d2c1afc6b73b9a4ce845e363d5ce8d9c94f
|
kotlin
|
Minor, move boxAgainstJava logic to a separate test class
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/tests/org/jetbrains/kotlin/codegen/AbstractBlackBoxAgainstJavaCodegenTest.kt b/compiler/tests/org/jetbrains/kotlin/codegen/AbstractBlackBoxAgainstJavaCodegenTest.kt
new file mode 100644
index 0000000000000..0aee0ba4af2df
--- /dev/null
+++ b/compiler/tests/org/jetbrains/kotlin/codegen/AbstractBlackBoxAgainstJavaCodegenTest.kt
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2010-2016 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.jetbrains.kotlin.codegen
+
+import com.intellij.ide.highlighter.JavaFileType
+import org.jetbrains.kotlin.cli.jvm.compiler.EnvironmentConfigFiles
+import org.jetbrains.kotlin.cli.jvm.compiler.KotlinCoreEnvironment
+import org.jetbrains.kotlin.test.ConfigurationKind
+import org.jetbrains.kotlin.test.KotlinTestUtils
+import org.jetbrains.kotlin.test.TestJdkKind
+import java.io.File
+
+abstract class AbstractBlackBoxAgainstJavaCodegenTest : AbstractBlackBoxCodegenTest() {
+ override fun createEnvironment(jdkKind: TestJdkKind, javaSourceDir: File?) {
+ val javaOutputDir = javaSourceDir?.let { javaSourceDir ->
+ val javaSourceFilePaths = javaSourceDir.walk().mapNotNull { file ->
+ if (file.isFile && file.extension == JavaFileType.DEFAULT_EXTENSION) {
+ file.path
+ }
+ else null
+ }.toList()
+
+ CodegenTestUtil.compileJava(javaSourceFilePaths, emptyList(), emptyList())
+ }
+
+ val configuration = KotlinTestUtils.compilerConfigurationForTests(
+ ConfigurationKind.ALL, jdkKind, KotlinTestUtils.getAnnotationsJar(), javaOutputDir
+ )
+
+ myEnvironment = KotlinCoreEnvironment.createForTests(testRootDisposable, configuration, EnvironmentConfigFiles.JVM_CONFIG_FILES)
+ }
+}
diff --git a/compiler/tests/org/jetbrains/kotlin/codegen/AbstractBlackBoxCodegenTest.java b/compiler/tests/org/jetbrains/kotlin/codegen/AbstractBlackBoxCodegenTest.java
index d00eb3376bc48..ee43aaba2a354 100644
--- a/compiler/tests/org/jetbrains/kotlin/codegen/AbstractBlackBoxCodegenTest.java
+++ b/compiler/tests/org/jetbrains/kotlin/codegen/AbstractBlackBoxCodegenTest.java
@@ -83,7 +83,7 @@ protected void doTestWithStdlib(@NotNull String filename) {
blackBoxFileByFullPath(filename);
}
- private void doTestMultiFile(@NotNull List<TestFile> files, @Nullable File javaSourceDir) throws Exception {
+ private void doTestMultiFile(@NotNull List<TestFile> files, @Nullable File javaSourceDir) {
TestJdkKind jdkKind = TestJdkKind.MOCK_JDK;
for (TestFile file : files) {
if (isFullJdkDirectiveDefined(file.content)) {
@@ -92,33 +92,15 @@ private void doTestMultiFile(@NotNull List<TestFile> files, @Nullable File javaS
}
}
- File javaOutputDir;
- if (javaSourceDir != null) {
- final List<String> javaSourceFilePaths = new ArrayList<String>();
- FileUtil.processFilesRecursively(javaSourceDir, new Processor<File>() {
- @Override
- public boolean process(File file) {
- if (file.isFile() && file.getName().endsWith(".java")) {
- javaSourceFilePaths.add(file.getPath());
- }
- return true;
- }
- });
-
- javaOutputDir = compileJava(javaSourceFilePaths, Collections.<String>emptyList(), Collections.<String>emptyList());
- }
- else {
- javaOutputDir = null;
- }
-
- CompilerConfiguration configuration =
- compilerConfigurationForTests(ConfigurationKind.ALL, jdkKind, getAnnotationsJar(), javaOutputDir);
+ createEnvironment(jdkKind, javaSourceDir);
+ loadMultiFiles(files);
+ blackBox();
+ }
+ protected void createEnvironment(@NotNull TestJdkKind jdkKind, @Nullable File javaSourceDir) {
+ CompilerConfiguration configuration = compilerConfigurationForTests(ConfigurationKind.ALL, jdkKind, getAnnotationsJar());
myEnvironment =
KotlinCoreEnvironment.createForTests(getTestRootDisposable(), configuration, EnvironmentConfigFiles.JVM_CONFIG_FILES);
-
- loadMultiFiles(files);
- blackBox();
}
// NOTE: tests under fullJdk/ are run with FULL_JDK instead of MOCK_JDK
diff --git a/compiler/tests/org/jetbrains/kotlin/codegen/BlackBoxAgainstJavaCodegenTestGenerated.java b/compiler/tests/org/jetbrains/kotlin/codegen/BlackBoxAgainstJavaCodegenTestGenerated.java
index 30a81608dbda8..2c163e07c8613 100644
--- a/compiler/tests/org/jetbrains/kotlin/codegen/BlackBoxAgainstJavaCodegenTestGenerated.java
+++ b/compiler/tests/org/jetbrains/kotlin/codegen/BlackBoxAgainstJavaCodegenTestGenerated.java
@@ -30,7 +30,7 @@
@TestMetadata("compiler/testData/codegen/boxAgainstJava")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
-public class BlackBoxAgainstJavaCodegenTestGenerated extends AbstractBlackBoxCodegenTest {
+public class BlackBoxAgainstJavaCodegenTestGenerated extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInBoxAgainstJava() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -38,7 +38,7 @@ public void testAllFilesPresentInBoxAgainstJava() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/annotations")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Annotations extends AbstractBlackBoxCodegenTest {
+ public static class Annotations extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInAnnotations() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/annotations"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -94,7 +94,7 @@ public void testRetentionInJava() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/annotations/kClassMapping")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class KClassMapping extends AbstractBlackBoxCodegenTest {
+ public static class KClassMapping extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInKClassMapping() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/annotations/kClassMapping"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -140,7 +140,7 @@ public void testVarargClassParameterOnJavaClass() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/callableReference")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class CallableReference extends AbstractBlackBoxCodegenTest {
+ public static class CallableReference extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInCallableReference() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/callableReference"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -173,7 +173,7 @@ public void testStaticMethod() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/constructor")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Constructor extends AbstractBlackBoxCodegenTest {
+ public static class Constructor extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInConstructor() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/constructor"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -194,7 +194,7 @@ public void testSecondaryConstructor() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/delegation")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Delegation extends AbstractBlackBoxCodegenTest {
+ public static class Delegation extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInDelegation() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/delegation"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -209,7 +209,7 @@ public void testDelegationAndInheritanceFromJava() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/enum")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Enum extends AbstractBlackBoxCodegenTest {
+ public static class Enum extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInEnum() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/enum"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -254,7 +254,7 @@ public void testStaticMethod() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/functions")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Functions extends AbstractBlackBoxCodegenTest {
+ public static class Functions extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInFunctions() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/functions"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -293,7 +293,7 @@ public void testUnrelatedUpperBounds() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/innerClass")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class InnerClass extends AbstractBlackBoxCodegenTest {
+ public static class InnerClass extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInInnerClass() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/innerClass"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -320,7 +320,7 @@ public void testKt4036() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/interfaces")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Interfaces extends AbstractBlackBoxCodegenTest {
+ public static class Interfaces extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInInterfaces() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/interfaces"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -335,7 +335,7 @@ public void testInheritJavaInterface() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/notNullAssertions")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class NotNullAssertions extends AbstractBlackBoxCodegenTest {
+ public static class NotNullAssertions extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInNotNullAssertions() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/notNullAssertions"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -356,7 +356,7 @@ public void testRightElvisOperand() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/platformTypes")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class PlatformTypes extends AbstractBlackBoxCodegenTest {
+ public static class PlatformTypes extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInPlatformTypes() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/platformTypes"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -371,7 +371,7 @@ public void testGenericUnit() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/property")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Property extends AbstractBlackBoxCodegenTest {
+ public static class Property extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInProperty() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/property"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -398,7 +398,7 @@ public void testReferenceToJavaFieldViaBridge() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/reflection")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Reflection extends AbstractBlackBoxCodegenTest {
+ public static class Reflection extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInReflection() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/reflection"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -406,7 +406,7 @@ public void testAllFilesPresentInReflection() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/reflection/classLiterals")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class ClassLiterals extends AbstractBlackBoxCodegenTest {
+ public static class ClassLiterals extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInClassLiterals() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/reflection/classLiterals"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -421,7 +421,7 @@ public void testJavaClassLiteral() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/reflection/mapping")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Mapping extends AbstractBlackBoxCodegenTest {
+ public static class Mapping extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInMapping() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/reflection/mapping"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -454,7 +454,7 @@ public void testJavaMethods() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/reflection/properties")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Properties extends AbstractBlackBoxCodegenTest {
+ public static class Properties extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInProperties() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/reflection/properties"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -470,7 +470,7 @@ public void testEqualsHashCodeToString() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/sam")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Sam extends AbstractBlackBoxCodegenTest {
+ public static class Sam extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInSam() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/sam"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -502,7 +502,7 @@ public void testSamConstructorGenericSignature() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/sam/adapters")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Adapters extends AbstractBlackBoxCodegenTest {
+ public static class Adapters extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInAdapters() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/sam/adapters"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -678,7 +678,7 @@ public void testTypeParameterOfOuterClass() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/sam/adapters/operators")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Operators extends AbstractBlackBoxCodegenTest {
+ public static class Operators extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInOperators() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/sam/adapters/operators"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -755,7 +755,7 @@ public void testSet() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/specialBuiltins")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class SpecialBuiltins extends AbstractBlackBoxCodegenTest {
+ public static class SpecialBuiltins extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInSpecialBuiltins() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/specialBuiltins"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -770,7 +770,7 @@ public void testCharBuffer() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/staticFun")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class StaticFun extends AbstractBlackBoxCodegenTest {
+ public static class StaticFun extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInStaticFun() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/staticFun"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -785,7 +785,7 @@ public void testClassWithNestedEnum() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/syntheticExtensions")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class SyntheticExtensions extends AbstractBlackBoxCodegenTest {
+ public static class SyntheticExtensions extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInSyntheticExtensions() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/syntheticExtensions"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -854,7 +854,7 @@ public void testSetterNonVoid2() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/visibility")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Visibility extends AbstractBlackBoxCodegenTest {
+ public static class Visibility extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInVisibility() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/visibility"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -862,7 +862,7 @@ public void testAllFilesPresentInVisibility() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/visibility/package")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class Package extends AbstractBlackBoxCodegenTest {
+ public static class Package extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInPackage() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/visibility/package"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -895,7 +895,7 @@ public void testPackageProperty() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/visibility/protectedAndPackage")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class ProtectedAndPackage extends AbstractBlackBoxCodegenTest {
+ public static class ProtectedAndPackage extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInProtectedAndPackage() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/visibility/protectedAndPackage"), Pattern.compile("^(.+)\\.kt$"), true);
}
@@ -928,7 +928,7 @@ public void testProtectedStaticClass() throws Exception {
@TestMetadata("compiler/testData/codegen/boxAgainstJava/visibility/protectedStatic")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
- public static class ProtectedStatic extends AbstractBlackBoxCodegenTest {
+ public static class ProtectedStatic extends AbstractBlackBoxAgainstJavaCodegenTest {
public void testAllFilesPresentInProtectedStatic() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadata(this.getClass(), new File("compiler/testData/codegen/boxAgainstJava/visibility/protectedStatic"), Pattern.compile("^(.+)\\.kt$"), true);
}
diff --git a/generators/src/org/jetbrains/kotlin/generators/tests/GenerateTests.kt b/generators/src/org/jetbrains/kotlin/generators/tests/GenerateTests.kt
index 8d920dcd37c36..e3e5247d2adfa 100644
--- a/generators/src/org/jetbrains/kotlin/generators/tests/GenerateTests.kt
+++ b/generators/src/org/jetbrains/kotlin/generators/tests/GenerateTests.kt
@@ -111,7 +111,10 @@ import org.jetbrains.kotlin.jps.build.*
import org.jetbrains.kotlin.jps.build.android.AbstractAndroidJpsTestCase
import org.jetbrains.kotlin.jps.incremental.AbstractProtoComparisonTest
import org.jetbrains.kotlin.js.test.semantics.*
-import org.jetbrains.kotlin.jvm.compiler.*
+import org.jetbrains.kotlin.jvm.compiler.AbstractCompileJavaAgainstKotlinTest
+import org.jetbrains.kotlin.jvm.compiler.AbstractLoadJavaTest
+import org.jetbrains.kotlin.jvm.compiler.AbstractLoadKotlinWithTypeTableTest
+import org.jetbrains.kotlin.jvm.compiler.AbstractWriteSignatureTest
import org.jetbrains.kotlin.jvm.runtime.AbstractJvmRuntimeDescriptorLoaderTest
import org.jetbrains.kotlin.lang.resolve.android.test.AbstractAndroidBoxTest
import org.jetbrains.kotlin.lang.resolve.android.test.AbstractAndroidBytecodeShapeTest
@@ -205,7 +208,7 @@ fun main(args: Array<String>) {
model("codegen/boxMultiFile")
}
- testClass<AbstractBlackBoxCodegenTest>("BlackBoxAgainstJavaCodegenTestGenerated") {
+ testClass<AbstractBlackBoxAgainstJavaCodegenTest>() {
model("codegen/boxAgainstJava")
}
|
28725723398943b2b51cb38d6fe92a3aadf4dee6
|
drools
|
now works with non DroolsObjectInputStream serialization. git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@13207 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/common/AbstractRuleBase.java b/drools-core/src/main/java/org/drools/common/AbstractRuleBase.java
index 6bc15ce0bf7..f3eb3560ed7 100644
--- a/drools-core/src/main/java/org/drools/common/AbstractRuleBase.java
+++ b/drools-core/src/main/java/org/drools/common/AbstractRuleBase.java
@@ -173,21 +173,27 @@ public void doWriteExternal(final ObjectOutput stream,
public void doReadExternal(final ObjectInput stream,
final Object[] objects) throws IOException,
ClassNotFoundException {
- // PackageCompilationData must be restored before Rules as it has the ClassLoader needed to resolve the generated code references in Rules
- DroolsObjectInputStream parentStream = (DroolsObjectInputStream) stream;
- parentStream.setRuleBase( this );
- this.pkgs = (Map) parentStream.readObject();
-
- this.packageClassLoader = new CompositePackageClassLoader( parentStream.getClassLoader() );
- for ( final Iterator it = this.pkgs.values().iterator(); it.hasNext(); ) {
- this.packageClassLoader.addClassLoader( ((Package) it.next()).getPackageCompilationData().getClassLoader() );
+ // PackageCompilationData must be restored before Rules as it has the ClassLoader needed to resolve the generated code references in Rules
+ this.pkgs = (Map) stream.readObject();
+
+ if ( stream instanceof DroolsObjectInputStream ) {
+ DroolsObjectInputStream parentStream = (DroolsObjectInputStream) stream;
+ parentStream.setRuleBase( this );
+ this.packageClassLoader = new CompositePackageClassLoader( parentStream.getClassLoader() );
+ this.classLoader = new MapBackedClassLoader( parentStream.getClassLoader() );
+ } else {
+ this.packageClassLoader = new CompositePackageClassLoader( Thread.currentThread().getContextClassLoader() );
+ this.classLoader = new MapBackedClassLoader( Thread.currentThread().getContextClassLoader() );
}
-
- this.classLoader = new MapBackedClassLoader( parentStream.getClassLoader() );
+
this.packageClassLoader.addClassLoader( this.classLoader );
+
+ for ( final Iterator it = this.pkgs.values().iterator(); it.hasNext(); ) {
+ this.packageClassLoader.addClassLoader( ((Package) it.next()).getPackageCompilationData().getClassLoader() );
+ }
// Return the rules stored as a byte[]
- final byte[] bytes = (byte[]) parentStream.readObject();
+ final byte[] bytes = (byte[]) stream.readObject();
// Use a custom ObjectInputStream that can resolve against a given classLoader
final DroolsObjectInputStream childStream = new DroolsObjectInputStream( new ByteArrayInputStream( bytes ),
diff --git a/drools-core/src/main/java/org/drools/reteoo/Rete.java b/drools-core/src/main/java/org/drools/reteoo/Rete.java
index 4569c0cf677..fa600fb6555 100644
--- a/drools-core/src/main/java/org/drools/reteoo/Rete.java
+++ b/drools-core/src/main/java/org/drools/reteoo/Rete.java
@@ -92,10 +92,6 @@ public Rete(InternalRuleBase ruleBase) {
this.ruleBase = ruleBase;
}
- public void setRuleBase(InternalRuleBase ruleBase) {
- this.ruleBase = ruleBase;
- }
-
private void readObject(ObjectInputStream stream) throws IOException,
ClassNotFoundException {
stream.defaultReadObject();
diff --git a/drools-core/src/main/java/org/drools/reteoo/ReteooBuilder.java b/drools-core/src/main/java/org/drools/reteoo/ReteooBuilder.java
index 7abe9b8f180..051d62f1ef1 100644
--- a/drools-core/src/main/java/org/drools/reteoo/ReteooBuilder.java
+++ b/drools-core/src/main/java/org/drools/reteoo/ReteooBuilder.java
@@ -97,14 +97,6 @@ private void readObject(ObjectInputStream stream) throws IOException,
this.ruleBase = ((DroolsObjectInputStream) stream).getRuleBase();
}
- /**
- * Allow this to be settable, otherwise we get infinite recursion on serialisation
- * @param ruleBase
- */
- void setRete(final Rete rete) {
-
- }
-
// ------------------------------------------------------------
// Instance methods
// ------------------------------------------------------------
diff --git a/drools-core/src/main/java/org/drools/reteoo/ReteooRuleBase.java b/drools-core/src/main/java/org/drools/reteoo/ReteooRuleBase.java
index 13bc46e5613..d6e1bd47778 100644
--- a/drools-core/src/main/java/org/drools/reteoo/ReteooRuleBase.java
+++ b/drools-core/src/main/java/org/drools/reteoo/ReteooRuleBase.java
@@ -155,8 +155,6 @@ public void readExternal(final ObjectInput stream) throws IOException,
this.rete = (Rete) objects[0];
this.reteooBuilder = (ReteooBuilder) objects[1];
-
- this.reteooBuilder.setRete( this.rete );
}
// ------------------------------------------------------------
diff --git a/drools-core/src/main/java/org/drools/rule/Package.java b/drools-core/src/main/java/org/drools/rule/Package.java
index f1692436631..f6b2fa94c78 100644
--- a/drools-core/src/main/java/org/drools/rule/Package.java
+++ b/drools-core/src/main/java/org/drools/rule/Package.java
@@ -126,7 +126,7 @@ public Package(final String name,
this.globals = Collections.EMPTY_MAP;
this.factTemplates = Collections.EMPTY_MAP;
this.functions = Collections.EMPTY_LIST;
-
+
// This classloader test should only be here for unit testing, too much legacy api to want to change by hand at the moment
if ( parentClassLoader == null ) {
parentClassLoader = Thread.currentThread().getContextClassLoader();
@@ -149,7 +149,7 @@ public void writeExternal(final ObjectOutput stream) throws IOException {
stream.writeObject( this.staticImports );
stream.writeObject( this.globals );
stream.writeObject( this.ruleFlows );
-
+
// Rules must be restored by an ObjectInputStream that can resolve using a given ClassLoader to handle seaprately by storing as
// a byte[]
final ByteArrayOutputStream bos = new ByteArrayOutputStream();
@@ -173,13 +173,13 @@ public void readExternal(final ObjectInput stream) throws IOException,
this.staticImports = (List) stream.readObject();
this.globals = (Map) stream.readObject();
this.ruleFlows = (Map) stream.readObject();
-
+
// Return the rules stored as a byte[]
final byte[] bytes = (byte[]) stream.readObject();
// Use a custom ObjectInputStream that can resolve against a given classLoader
final DroolsObjectInputStream streamWithLoader = new DroolsObjectInputStream( new ByteArrayInputStream( bytes ),
- this.packageCompilationData.getClassLoader() );
+ this.packageCompilationData.getClassLoader() );
this.rules = (Map) streamWithLoader.readObject();
}
@@ -289,17 +289,18 @@ public void addRule(final Rule rule) {
rule );
rule.setLoadOrder( this.rules.size() );
}
-
+
/**
* Add a rule flow to this package.
*/
public void addRuleFlow(Process process) {
- if (this.ruleFlows == Collections.EMPTY_MAP) {
+ if ( this.ruleFlows == Collections.EMPTY_MAP ) {
this.ruleFlows = new HashMap();
}
- this.ruleFlows.put(process.getId(), process );
+ this.ruleFlows.put( process.getId(),
+ process );
}
-
+
/**
* Get the rule flows for this package. The key is the ruleflow id.
* It will be Collections.EMPTY_MAP if none have been added.
@@ -307,18 +308,16 @@ public void addRuleFlow(Process process) {
public Map getRuleFlows() {
return this.ruleFlows;
}
-
-
+
/**
* Rule flows can be removed by ID.
*/
public void removeRuleFlow(String id) {
- if (!this.ruleFlows.containsKey( id )) {
- throw new IllegalArgumentException("The rule flow with id [" + id + "] is not part of this package.");
+ if ( !this.ruleFlows.containsKey( id ) ) {
+ throw new IllegalArgumentException( "The rule flow with id [" + id + "] is not part of this package." );
}
this.ruleFlows.remove( id );
}
-
public void removeRule(final Rule rule) {
this.rules.remove( rule.getName() );
diff --git a/drools-core/src/main/java/org/drools/rule/PackageCompilationData.java b/drools-core/src/main/java/org/drools/rule/PackageCompilationData.java
index 265f6c16147..dd204704edd 100644
--- a/drools-core/src/main/java/org/drools/rule/PackageCompilationData.java
+++ b/drools-core/src/main/java/org/drools/rule/PackageCompilationData.java
@@ -125,8 +125,12 @@ public void writeExternal(final ObjectOutput stream) throws IOException {
*/
public void readExternal(final ObjectInput stream) throws IOException,
ClassNotFoundException {
- DroolsObjectInputStream droolsStream = ( DroolsObjectInputStream ) stream;
- initClassLoader( droolsStream.getClassLoader() );
+ if ( stream instanceof DroolsObjectInputStream ) {
+ DroolsObjectInputStream droolsStream = ( DroolsObjectInputStream ) stream;
+ initClassLoader( droolsStream.getClassLoader() );
+ } else {
+ initClassLoader( Thread.currentThread().getContextClassLoader() );
+ }
this.store = (Map) stream.readObject();
this.AST = stream.readObject();
|
015284af7cf8eff979cd0c5e5ad3d189915e98a9
|
spring-framework
|
+ add implicit (String) type for typed string values w/o a specified type
|
a
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.beans/src/main/java/org/springframework/beans/factory/support/ConstructorResolver.java b/org.springframework.beans/src/main/java/org/springframework/beans/factory/support/ConstructorResolver.java
index 30b93f7f598a..d390803b15fd 100644
--- a/org.springframework.beans/src/main/java/org/springframework/beans/factory/support/ConstructorResolver.java
+++ b/org.springframework.beans/src/main/java/org/springframework/beans/factory/support/ConstructorResolver.java
@@ -681,14 +681,7 @@ private ArgumentsHolder createArgumentArray(
}
}
args.arguments[paramIndex] = convertedValue;
- if (mbd.isLenientConstructorResolution()) {
- args.rawArguments[paramIndex] = originalValue;
- }
- else {
- args.rawArguments[paramIndex] =
- ((sourceValue instanceof TypedStringValue && !((TypedStringValue) sourceValue).hasTargetType()) ?
- convertedValue : originalValue);
- }
+ args.rawArguments[paramIndex] = originalValue;
}
else {
// No explicit match found: we're either supposed to autowire or
|
5fee1b116bcd427168f1fafc7948c2e44520cc5c
|
intellij-community
|
PY-16335 Preserve formatting of converted collection literals
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/python/src/com/jetbrains/python/codeInsight/intentions/PyBaseConvertCollectionLiteralIntention.java b/python/src/com/jetbrains/python/codeInsight/intentions/PyBaseConvertCollectionLiteralIntention.java
index 0ac0409edf1f4..daa669af001e3 100644
--- a/python/src/com/jetbrains/python/codeInsight/intentions/PyBaseConvertCollectionLiteralIntention.java
+++ b/python/src/com/jetbrains/python/codeInsight/intentions/PyBaseConvertCollectionLiteralIntention.java
@@ -98,16 +98,17 @@ public void invoke(@NotNull Project project, Editor editor, PsiFile file) throws
replacedElement = literal;
}
+ final String innerText = stripLiteralBraces(replacedElement);
final PyElementGenerator elementGenerator = PyElementGenerator.getInstance(project);
final PyExpression newLiteral = elementGenerator.createExpressionFromText(LanguageLevel.forElement(file),
- myLeftBrace + stripLiteralBraces(literal) + myRightBrace);
+ myLeftBrace + innerText + myRightBrace);
replacedElement.replace(newLiteral);
}
@NotNull
- private static String stripLiteralBraces(@NotNull PySequenceExpression literal) {
+ private static String stripLiteralBraces(@NotNull PsiElement literal) {
if (literal instanceof PyTupleExpression) {
- return literal.getText().trim();
+ return literal.getText();
}
final PsiElement firstChild = literal.getFirstChild();
@@ -130,7 +131,7 @@ private static String stripLiteralBraces(@NotNull PySequenceExpression literal)
contentEndOffset = replacedText.length();
}
- return literal.getText().substring(contentStartOffset, contentEndOffset).trim();
+ return literal.getText().substring(contentStartOffset, contentEndOffset);
}
@Nullable
diff --git a/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments.py b/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments.py
new file mode 100644
index 0000000000000..0c688956c9b78
--- /dev/null
+++ b/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments.py
@@ -0,0 +1,4 @@
+xs = (<caret>
+ 1, 2, # comment 1
+ 3 # comment 2
+)
\ No newline at end of file
diff --git a/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments_after.py b/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments_after.py
new file mode 100644
index 0000000000000..adccc398ae06a
--- /dev/null
+++ b/python/testData/intentions/PyConvertCollectionLiteralIntentionTest/convertLiteralPreservesFormattingAndComments_after.py
@@ -0,0 +1,4 @@
+xs = [
+ 1, 2, # comment 1
+ 3 # comment 2
+]
\ No newline at end of file
diff --git a/python/testSrc/com/jetbrains/python/intentions/PyConvertCollectionLiteralIntentionTest.java b/python/testSrc/com/jetbrains/python/intentions/PyConvertCollectionLiteralIntentionTest.java
index 0e50c05e8d1f3..7b10e2bd57cf1 100644
--- a/python/testSrc/com/jetbrains/python/intentions/PyConvertCollectionLiteralIntentionTest.java
+++ b/python/testSrc/com/jetbrains/python/intentions/PyConvertCollectionLiteralIntentionTest.java
@@ -103,4 +103,9 @@ public void testConvertSetWithoutClosingBraceToTuple() {
public void testConvertSetToList() {
doIntentionTest(CONVERT_SET_TO_LIST);
}
+
+ // PY-16335
+ public void testConvertLiteralPreservesFormattingAndComments() {
+ doIntentionTest(CONVERT_TUPLE_TO_LIST);
+ }
}
|
ed77c8925d7126f9ea3c8d9cbb1e246ad61ce37c
|
hadoop
|
YARN-596. Use scheduling policies throughout the queue hierarchy to decide which containers to preempt (Wei Yan via Sandy Ryza) git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1598198 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 6d2638927c350..2ca64fb4ea647 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -99,6 +99,9 @@ Release 2.5.0 - UNRELEASED
YARN-2107. Refactored timeline classes into o.a.h.y.s.timeline package. (Vinod
Kumar Vavilapalli via zjshen)
+ YARN-596. Use scheduling policies throughout the queue hierarchy to decide
+ which containers to preempt (Wei Yan via Sandy Ryza)
+
OPTIMIZATIONS
BUG FIXES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AppSchedulable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AppSchedulable.java
index 9ed5179270a66..4dc0bf4ceb870 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AppSchedulable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AppSchedulable.java
@@ -18,8 +18,10 @@
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
+import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
+import java.util.Comparator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -31,8 +33,6 @@
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
-import org.apache.hadoop.yarn.factories.RecordFactory;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.resourcemanager.resource.ResourceWeights;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType;
@@ -58,6 +58,8 @@ public class AppSchedulable extends Schedulable {
private Priority priority;
private ResourceWeights resourceWeights;
+ private RMContainerComparator comparator = new RMContainerComparator();
+
public AppSchedulable(FairScheduler scheduler, FSSchedulerApp app, FSLeafQueue queue) {
this.scheduler = scheduler;
this.app = app;
@@ -111,7 +113,10 @@ public long getStartTime() {
@Override
public Resource getResourceUsage() {
- return app.getCurrentConsumption();
+ // Here the getPreemptedResources() always return zero, except in
+ // a preemption round
+ return Resources.subtract(app.getCurrentConsumption(),
+ app.getPreemptedResources());
}
@@ -383,6 +388,27 @@ public Resource assignContainer(FSSchedulerNode node) {
return assignContainer(node, false);
}
+ /**
+ * Preempt a running container according to the priority
+ */
+ @Override
+ public RMContainer preemptContainer() {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("App " + getName() + " is going to preempt a running " +
+ "container");
+ }
+
+ RMContainer toBePreempted = null;
+ for (RMContainer container : app.getLiveContainers()) {
+ if (! app.getPreemptionContainers().contains(container) &&
+ (toBePreempted == null ||
+ comparator.compare(toBePreempted, container) > 0)) {
+ toBePreempted = container;
+ }
+ }
+ return toBePreempted;
+ }
+
/**
* Whether this app has containers requests that could be satisfied on the
* given node, if the node had full space.
@@ -407,4 +433,17 @@ public boolean hasContainerForNode(Priority prio, FSSchedulerNode node) {
Resources.lessThanOrEqual(RESOURCE_CALCULATOR, null,
anyRequest.getCapability(), node.getRMNode().getTotalCapability());
}
+
+ static class RMContainerComparator implements Comparator<RMContainer>,
+ Serializable {
+ @Override
+ public int compare(RMContainer c1, RMContainer c2) {
+ int ret = c1.getContainer().getPriority().compareTo(
+ c2.getContainer().getPriority());
+ if (ret == 0) {
+ return c2.getContainerId().compareTo(c1.getContainerId());
+ }
+ return ret;
+ }
+ }
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java
index e842a6a3557be..fe738da7d4611 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java
@@ -33,10 +33,10 @@
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppUtils;
import org.apache.hadoop.yarn.util.resource.Resources;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplication;
@Private
@Unstable
@@ -208,6 +208,36 @@ public Resource assignContainer(FSSchedulerNode node) {
return assigned;
}
+ @Override
+ public RMContainer preemptContainer() {
+ RMContainer toBePreempted = null;
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Queue " + getName() + " is going to preempt a container " +
+ "from its applications.");
+ }
+
+ // If this queue is not over its fair share, reject
+ if (!preemptContainerPreCheck()) {
+ return toBePreempted;
+ }
+
+ // Choose the app that is most over fair share
+ Comparator<Schedulable> comparator = policy.getComparator();
+ AppSchedulable candidateSched = null;
+ for (AppSchedulable sched : runnableAppScheds) {
+ if (candidateSched == null ||
+ comparator.compare(sched, candidateSched) > 0) {
+ candidateSched = sched;
+ }
+ }
+
+ // Preempt from the selected app
+ if (candidateSched != null) {
+ toBePreempted = candidateSched.preemptContainer();
+ }
+ return toBePreempted;
+ }
+
@Override
public List<FSQueue> getChildQueues() {
return new ArrayList<FSQueue>(1);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSParentQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSParentQueue.java
index 427cb86457937..48db41496340c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSParentQueue.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSParentQueue.java
@@ -21,6 +21,7 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.Comparator;
import java.util.List;
import org.apache.commons.logging.Log;
@@ -32,6 +33,7 @@
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager;
@@ -156,6 +158,32 @@ public Resource assignContainer(FSSchedulerNode node) {
return assigned;
}
+ @Override
+ public RMContainer preemptContainer() {
+ RMContainer toBePreempted = null;
+
+ // If this queue is not over its fair share, reject
+ if (!preemptContainerPreCheck()) {
+ return toBePreempted;
+ }
+
+ // Find the childQueue which is most over fair share
+ FSQueue candidateQueue = null;
+ Comparator<Schedulable> comparator = policy.getComparator();
+ for (FSQueue queue : childQueues) {
+ if (candidateQueue == null ||
+ comparator.compare(queue, candidateQueue) > 0) {
+ candidateQueue = queue;
+ }
+ }
+
+ // Let the selected queue choose which of its container to preempt
+ if (candidateQueue != null) {
+ toBePreempted = candidateQueue.preemptContainer();
+ }
+ return toBePreempted;
+ }
+
@Override
public List<FSQueue> getChildQueues() {
return childQueues;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSQueue.java
index 1e94046100ac3..716e1ee687441 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSQueue.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSQueue.java
@@ -187,4 +187,17 @@ protected boolean assignContainerPreCheck(FSSchedulerNode node) {
}
return true;
}
+
+ /**
+ * Helper method to check if the queue should preempt containers
+ *
+ * @return true if check passes (can preempt) or false otherwise
+ */
+ protected boolean preemptContainerPreCheck() {
+ if (this == scheduler.getQueueManager().getRootQueue()) {
+ return true;
+ }
+ return parent.getPolicy()
+ .checkIfUsageOverFairShare(getResourceUsage(), getFairShare());
+ }
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSSchedulerApp.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSSchedulerApp.java
index adabfefaee184..63a29e4b099a0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSSchedulerApp.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSSchedulerApp.java
@@ -59,6 +59,8 @@ public class FSSchedulerApp extends SchedulerApplicationAttempt {
private AppSchedulable appSchedulable;
final Map<RMContainer, Long> preemptionMap = new HashMap<RMContainer, Long>();
+
+ private Resource preemptedResources = Resources.createResource(0);
public FSSchedulerApp(ApplicationAttemptId applicationAttemptId,
String user, FSLeafQueue queue, ActiveUsersManager activeUsersManager,
@@ -316,6 +318,7 @@ public synchronized void resetAllowedLocalityLevel(Priority priority,
public void addPreemption(RMContainer container, long time) {
assert preemptionMap.get(container) == null;
preemptionMap.put(container, time);
+ Resources.addTo(preemptedResources, container.getAllocatedResource());
}
public Long getContainerPreemptionTime(RMContainer container) {
@@ -330,4 +333,20 @@ public Set<RMContainer> getPreemptionContainers() {
public FSLeafQueue getQueue() {
return (FSLeafQueue)super.getQueue();
}
+
+ public Resource getPreemptedResources() {
+ return preemptedResources;
+ }
+
+ public void resetPreemptedResources() {
+ preemptedResources = Resources.createResource(0);
+ for (RMContainer container : getPreemptionContainers()) {
+ Resources.addTo(preemptedResources, container.getAllocatedResource());
+ }
+ }
+
+ public void clearPreemptedResources() {
+ preemptedResources.setMemory(0);
+ preemptedResources.setVirtualCores(0);
+ }
}
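One detail worth calling out from the FSSchedulerApp changes above: getResourceUsage() now reports consumption minus the resources already marked for preemption, but only while a preemption round is in flight, so a single round does not keep selecting the same application. A tiny standalone sketch of that bookkeeping, using plain ints as a stand-in for the YARN Resource type:

public class PreemptedUsageSketch {
    static int currentConsumption = 8;   // e.g. 8 GB of live containers
    static int preemptedResources = 0;   // populated only inside a preemption round

    static int reportedUsage() {
        return currentConsumption - preemptedResources;
    }

    public static void main(String[] args) {
        preemptedResources = 2;                  // resetPreemptedResources() analogue
        System.out.println(reportedUsage());     // 6 -> the app now looks less over its share
        preemptedResources = 0;                  // clearPreemptedResources() analogue
        System.out.println(reportedUsage());     // 8 outside the round
    }
}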
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairScheduler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairScheduler.java
index 830f6f7509903..6d71ea2fbb3a6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairScheduler.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairScheduler.java
@@ -20,14 +20,11 @@
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
-import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@@ -337,94 +334,78 @@ protected synchronized void preemptTasksIfNecessary() {
}
if (Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource, resToPreempt,
Resources.none())) {
- preemptResources(queueMgr.getLeafQueues(), resToPreempt);
+ preemptResources(resToPreempt);
}
}
/**
- * Preempt a quantity of resources from a list of QueueSchedulables. The
- * policy for this is to pick apps from queues that are over their fair share,
- * but make sure that no queue is placed below its fair share in the process.
- * We further prioritize preemption by choosing containers with lowest
- * priority to preempt.
+ * Preempt a quantity of resources. Each round, we start from the root queue,
+ * level-by-level, until choosing a candidate application.
+ * The policy for prioritizing preemption for each queue depends on its
+ * SchedulingPolicy: (1) fairshare/DRF, choose the ChildSchedulable that is
+ * most over its fair share; (2) FIFO, choose the childSchedulable that is
+ * latest launched.
+ * Inside each application, we further prioritize preemption by choosing
+ * containers with lowest priority to preempt.
+ * We make sure that no queue is placed below its fair share in the process.
*/
- protected void preemptResources(Collection<FSLeafQueue> scheds,
- Resource toPreempt) {
- if (scheds.isEmpty() || Resources.equals(toPreempt, Resources.none())) {
+ protected void preemptResources(Resource toPreempt) {
+ if (Resources.equals(toPreempt, Resources.none())) {
return;
}
- Map<RMContainer, FSSchedulerApp> apps =
- new HashMap<RMContainer, FSSchedulerApp>();
- Map<RMContainer, FSLeafQueue> queues =
- new HashMap<RMContainer, FSLeafQueue>();
-
- // Collect running containers from over-scheduled queues
- List<RMContainer> runningContainers = new ArrayList<RMContainer>();
- for (FSLeafQueue sched : scheds) {
- if (Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource,
- sched.getResourceUsage(), sched.getFairShare())) {
- for (AppSchedulable as : sched.getRunnableAppSchedulables()) {
- for (RMContainer c : as.getApp().getLiveContainers()) {
- runningContainers.add(c);
- apps.put(c, as.getApp());
- queues.put(c, sched);
- }
- }
- }
- }
-
- // Sort containers into reverse order of priority
- Collections.sort(runningContainers, new Comparator<RMContainer>() {
- public int compare(RMContainer c1, RMContainer c2) {
- int ret = c1.getContainer().getPriority().compareTo(
- c2.getContainer().getPriority());
- if (ret == 0) {
- return c2.getContainerId().compareTo(c1.getContainerId());
- }
- return ret;
- }
- });
-
// Scan down the list of containers we've already warned and kill them
// if we need to. Remove any containers from the list that we don't need
// or that are no longer running.
Iterator<RMContainer> warnedIter = warnedContainers.iterator();
- Set<RMContainer> preemptedThisRound = new HashSet<RMContainer>();
while (warnedIter.hasNext()) {
RMContainer container = warnedIter.next();
- if (container.getState() == RMContainerState.RUNNING &&
+ if ((container.getState() == RMContainerState.RUNNING ||
+ container.getState() == RMContainerState.ALLOCATED) &&
Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource,
toPreempt, Resources.none())) {
- warnOrKillContainer(container, apps.get(container), queues.get(container));
- preemptedThisRound.add(container);
+ warnOrKillContainer(container);
Resources.subtractFrom(toPreempt, container.getContainer().getResource());
} else {
warnedIter.remove();
}
}
- // Scan down the rest of the containers until we've preempted enough, making
- // sure we don't preempt too many from any queue
- Iterator<RMContainer> runningIter = runningContainers.iterator();
- while (runningIter.hasNext() &&
- Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource,
- toPreempt, Resources.none())) {
- RMContainer container = runningIter.next();
- FSLeafQueue sched = queues.get(container);
- if (!preemptedThisRound.contains(container) &&
- Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource,
- sched.getResourceUsage(), sched.getFairShare())) {
- warnOrKillContainer(container, apps.get(container), sched);
-
- warnedContainers.add(container);
- Resources.subtractFrom(toPreempt, container.getContainer().getResource());
+ try {
+ // Reset preemptedResource for each app
+ for (FSLeafQueue queue : getQueueManager().getLeafQueues()) {
+ for (AppSchedulable app : queue.getRunnableAppSchedulables()) {
+ app.getApp().resetPreemptedResources();
+ }
+ }
+
+ while (Resources.greaterThan(RESOURCE_CALCULATOR, clusterResource,
+ toPreempt, Resources.none())) {
+ RMContainer container =
+ getQueueManager().getRootQueue().preemptContainer();
+ if (container == null) {
+ break;
+ } else {
+ warnOrKillContainer(container);
+ warnedContainers.add(container);
+ Resources.subtractFrom(
+ toPreempt, container.getContainer().getResource());
+ }
+ }
+ } finally {
+ // Clear preemptedResources for each app
+ for (FSLeafQueue queue : getQueueManager().getLeafQueues()) {
+ for (AppSchedulable app : queue.getRunnableAppSchedulables()) {
+ app.getApp().clearPreemptedResources();
+ }
}
}
}
- private void warnOrKillContainer(RMContainer container, FSSchedulerApp app,
- FSLeafQueue queue) {
+ private void warnOrKillContainer(RMContainer container) {
+ ApplicationAttemptId appAttemptId = container.getApplicationAttemptId();
+ FSSchedulerApp app = getSchedulerApp(appAttemptId);
+ FSLeafQueue queue = app.getQueue();
LOG.info("Preempting container (prio=" + container.getContainer().getPriority() +
"res=" + container.getContainer().getResource() +
") from queue " + queue.getName());
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/Schedulable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/Schedulable.java
index 92b6d3e71eabc..4f8ac1e63744c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/Schedulable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/Schedulable.java
@@ -23,6 +23,7 @@
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.resource.ResourceWeights;
+import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.util.resource.Resources;
/**
@@ -100,6 +101,11 @@ public abstract class Schedulable {
*/
public abstract Resource assignContainer(FSSchedulerNode node);
+ /**
+ * Preempt a container from this Schedulable if possible.
+ */
+ public abstract RMContainer preemptContainer();
+
/** Assign a fair share to this Schedulable. */
public void setFairShare(Resource fairShare) {
this.fairShare = fairShare;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
index 549b85c380f61..1d77a43ce7588 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
@@ -139,4 +139,14 @@ public static boolean isApplicableTo(SchedulingPolicy policy, byte depth) {
*/
public abstract void computeShares(
Collection<? extends Schedulable> schedulables, Resource totalResources);
+
+ /**
+ * Check if the resource usage is over the fair share under this policy
+ *
+ * @param usage {@link Resource} the resource usage
+ * @param fairShare {@link Resource} the fair share
+ * @return true if check passes (is over) or false otherwise
+ */
+ public abstract boolean checkIfUsageOverFairShare(
+ Resource usage, Resource fairShare);
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/DominantResourceFairnessPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/DominantResourceFairnessPolicy.java
index f5b841772295a..4b663d95de8b9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/DominantResourceFairnessPolicy.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/DominantResourceFairnessPolicy.java
@@ -69,6 +69,11 @@ public void computeShares(Collection<? extends Schedulable> schedulables,
}
}
+ @Override
+ public boolean checkIfUsageOverFairShare(Resource usage, Resource fairShare) {
+ return !Resources.fitsIn(usage, fairShare);
+ }
+
@Override
public void initialize(Resource clusterCapacity) {
comparator.setClusterCapacity(clusterCapacity);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FairSharePolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FairSharePolicy.java
index fbad101267697..ca7297ff46c38 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FairSharePolicy.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FairSharePolicy.java
@@ -119,6 +119,11 @@ public void computeShares(Collection<? extends Schedulable> schedulables,
ComputeFairShares.computeShares(schedulables, totalResources, ResourceType.MEMORY);
}
+ @Override
+ public boolean checkIfUsageOverFairShare(Resource usage, Resource fairShare) {
+ return Resources.greaterThan(RESOURCE_CALCULATOR, null, usage, fairShare);
+ }
+
@Override
public byte getApplicableDepth() {
return SchedulingPolicy.DEPTH_ANY;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FifoPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FifoPolicy.java
index 3451cfea4c50b..d996944681157 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FifoPolicy.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/policies/FifoPolicy.java
@@ -20,7 +20,6 @@
import java.io.Serializable;
import java.util.Collection;
import java.util.Comparator;
-import java.util.Iterator;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
@@ -88,6 +87,13 @@ public void computeShares(Collection<? extends Schedulable> schedulables,
earliest.setFairShare(Resources.clone(totalResources));
}
+ @Override
+ public boolean checkIfUsageOverFairShare(Resource usage, Resource fairShare) {
+ throw new UnsupportedOperationException(
+ "FifoPolicy doesn't support checkIfUsageOverFairshare operation, " +
+ "as FifoPolicy only works for FSLeafQueue.");
+ }
+
@Override
public byte getApplicableDepth() {
return SchedulingPolicy.DEPTH_LEAF;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FakeSchedulable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FakeSchedulable.java
index d0ba0d8e085f4..dcfc2d3aa2fc6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FakeSchedulable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FakeSchedulable.java
@@ -21,6 +21,7 @@
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.resource.ResourceWeights;
+import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.util.resource.Resources;
@@ -83,6 +84,11 @@ public Resource assignContainer(FSSchedulerNode node) {
return null;
}
+ @Override
+ public RMContainer preemptContainer() {
+ return null;
+ }
+
@Override
public Resource getDemand() {
return null;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java
index 2755ef081eae4..2de498f4f41a1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java
@@ -1029,13 +1029,13 @@ else if (p.getName().equals("root.queueB")) {
@Test (timeout = 5000)
/**
- * Make sure containers are chosen to be preempted in the correct order. Right
- * now this means decreasing order of priority.
+ * Make sure containers are chosen to be preempted in the correct order.
*/
public void testChoiceOfPreemptedContainers() throws Exception {
conf.setLong(FairSchedulerConfiguration.PREEMPTION_INTERVAL, 5000);
- conf.setLong(FairSchedulerConfiguration.WAIT_TIME_BEFORE_KILL, 10000);
+ conf.setLong(FairSchedulerConfiguration.WAIT_TIME_BEFORE_KILL, 10000);
conf.set(FairSchedulerConfiguration.ALLOCATION_FILE + ".allocation.file", ALLOC_FILE);
+ conf.set(FairSchedulerConfiguration.USER_AS_DEFAULT_QUEUE, "false");
MockClock clock = new MockClock();
scheduler.setClock(clock);
@@ -1052,7 +1052,7 @@ public void testChoiceOfPreemptedContainers() throws Exception {
out.println("<queue name=\"queueC\">");
out.println("<weight>.25</weight>");
out.println("</queue>");
- out.println("<queue name=\"queueD\">");
+ out.println("<queue name=\"default\">");
out.println("<weight>.25</weight>");
out.println("</queue>");
out.println("</allocations>");
@@ -1060,133 +1060,132 @@ public void testChoiceOfPreemptedContainers() throws Exception {
scheduler.reinitialize(conf, resourceManager.getRMContext());
- // Create four nodes
+ // Create two nodes
RMNode node1 =
- MockNodes.newNodeInfo(1, Resources.createResource(2 * 1024, 2), 1,
+ MockNodes.newNodeInfo(1, Resources.createResource(4 * 1024, 4), 1,
"127.0.0.1");
NodeAddedSchedulerEvent nodeEvent1 = new NodeAddedSchedulerEvent(node1);
scheduler.handle(nodeEvent1);
RMNode node2 =
- MockNodes.newNodeInfo(1, Resources.createResource(2 * 1024, 2), 2,
+ MockNodes.newNodeInfo(1, Resources.createResource(4 * 1024, 4), 2,
"127.0.0.2");
NodeAddedSchedulerEvent nodeEvent2 = new NodeAddedSchedulerEvent(node2);
scheduler.handle(nodeEvent2);
- RMNode node3 =
- MockNodes.newNodeInfo(1, Resources.createResource(2 * 1024, 2), 3,
- "127.0.0.3");
- NodeAddedSchedulerEvent nodeEvent3 = new NodeAddedSchedulerEvent(node3);
- scheduler.handle(nodeEvent3);
-
-
- // Queue A and B each request three containers
+ // Queue A and B each request two applications
ApplicationAttemptId app1 =
- createSchedulingRequest(1 * 1024, "queueA", "user1", 1, 1);
+ createSchedulingRequest(1 * 1024, 1, "queueA", "user1", 1, 1);
+ createSchedulingRequestExistingApplication(1 * 1024, 1, 2, app1);
ApplicationAttemptId app2 =
- createSchedulingRequest(1 * 1024, "queueA", "user1", 1, 2);
- ApplicationAttemptId app3 =
- createSchedulingRequest(1 * 1024, "queueA", "user1", 1, 3);
+ createSchedulingRequest(1 * 1024, 1, "queueA", "user1", 1, 3);
+ createSchedulingRequestExistingApplication(1 * 1024, 1, 4, app2);
+ ApplicationAttemptId app3 =
+ createSchedulingRequest(1 * 1024, 1, "queueB", "user1", 1, 1);
+ createSchedulingRequestExistingApplication(1 * 1024, 1, 2, app3);
ApplicationAttemptId app4 =
- createSchedulingRequest(1 * 1024, "queueB", "user1", 1, 1);
- ApplicationAttemptId app5 =
- createSchedulingRequest(1 * 1024, "queueB", "user1", 1, 2);
- ApplicationAttemptId app6 =
- createSchedulingRequest(1 * 1024, "queueB", "user1", 1, 3);
+ createSchedulingRequest(1 * 1024, 1, "queueB", "user1", 1, 3);
+ createSchedulingRequestExistingApplication(1 * 1024, 1, 4, app4);
scheduler.update();
+ scheduler.getQueueManager().getLeafQueue("queueA", true)
+ .setPolicy(SchedulingPolicy.parse("fifo"));
+ scheduler.getQueueManager().getLeafQueue("queueB", true)
+ .setPolicy(SchedulingPolicy.parse("fair"));
+
// Sufficient node check-ins to fully schedule containers
- for (int i = 0; i < 2; i++) {
- NodeUpdateSchedulerEvent nodeUpdate1 = new NodeUpdateSchedulerEvent(node1);
+ NodeUpdateSchedulerEvent nodeUpdate1 = new NodeUpdateSchedulerEvent(node1);
+ NodeUpdateSchedulerEvent nodeUpdate2 = new NodeUpdateSchedulerEvent(node2);
+ for (int i = 0; i < 4; i++) {
scheduler.handle(nodeUpdate1);
-
- NodeUpdateSchedulerEvent nodeUpdate2 = new NodeUpdateSchedulerEvent(node2);
scheduler.handle(nodeUpdate2);
-
- NodeUpdateSchedulerEvent nodeUpdate3 = new NodeUpdateSchedulerEvent(node3);
- scheduler.handle(nodeUpdate3);
}
- assertEquals(1, scheduler.getSchedulerApp(app1).getLiveContainers().size());
- assertEquals(1, scheduler.getSchedulerApp(app2).getLiveContainers().size());
- assertEquals(1, scheduler.getSchedulerApp(app3).getLiveContainers().size());
- assertEquals(1, scheduler.getSchedulerApp(app4).getLiveContainers().size());
- assertEquals(1, scheduler.getSchedulerApp(app5).getLiveContainers().size());
- assertEquals(1, scheduler.getSchedulerApp(app6).getLiveContainers().size());
-
- // Now new requests arrive from queues C and D
- ApplicationAttemptId app7 =
- createSchedulingRequest(1 * 1024, "queueC", "user1", 1, 1);
- ApplicationAttemptId app8 =
- createSchedulingRequest(1 * 1024, "queueC", "user1", 1, 2);
- ApplicationAttemptId app9 =
- createSchedulingRequest(1 * 1024, "queueC", "user1", 1, 3);
-
- ApplicationAttemptId app10 =
- createSchedulingRequest(1 * 1024, "queueD", "user1", 1, 1);
- ApplicationAttemptId app11 =
- createSchedulingRequest(1 * 1024, "queueD", "user1", 1, 2);
- ApplicationAttemptId app12 =
- createSchedulingRequest(1 * 1024, "queueD", "user1", 1, 3);
+ assertEquals(2, scheduler.getSchedulerApp(app1).getLiveContainers().size());
+ assertEquals(2, scheduler.getSchedulerApp(app2).getLiveContainers().size());
+ assertEquals(2, scheduler.getSchedulerApp(app3).getLiveContainers().size());
+ assertEquals(2, scheduler.getSchedulerApp(app4).getLiveContainers().size());
+ // Now new requests arrive from queueC and default
+ createSchedulingRequest(1 * 1024, 1, "queueC", "user1", 1, 1);
+ createSchedulingRequest(1 * 1024, 1, "queueC", "user1", 1, 1);
+ createSchedulingRequest(1 * 1024, 1, "default", "user1", 1, 1);
+ createSchedulingRequest(1 * 1024, 1, "default", "user1", 1, 1);
scheduler.update();
- // We should be able to claw back one container from A and B each.
- // Make sure it is lowest priority container.
- scheduler.preemptResources(scheduler.getQueueManager().getLeafQueues(),
- Resources.createResource(2 * 1024));
- assertEquals(1, scheduler.getSchedulerApp(app1).getLiveContainers().size());
- assertEquals(1, scheduler.getSchedulerApp(app2).getLiveContainers().size());
- assertEquals(1, scheduler.getSchedulerApp(app4).getLiveContainers().size());
- assertEquals(1, scheduler.getSchedulerApp(app5).getLiveContainers().size());
-
- // First verify we are adding containers to preemption list for the application
- assertTrue(!Collections.disjoint(scheduler.getSchedulerApp(app3).getLiveContainers(),
- scheduler.getSchedulerApp(app3).getPreemptionContainers()));
- assertTrue(!Collections.disjoint(scheduler.getSchedulerApp(app6).getLiveContainers(),
- scheduler.getSchedulerApp(app6).getPreemptionContainers()));
+ // We should be able to claw back one container from queueA and queueB each.
+ scheduler.preemptResources(Resources.createResource(2 * 1024));
+ assertEquals(2, scheduler.getSchedulerApp(app1).getLiveContainers().size());
+ assertEquals(2, scheduler.getSchedulerApp(app3).getLiveContainers().size());
+
+ // First verify we are adding containers to preemption list for the app.
+ // For queueA (fifo), app2 is selected.
+ // For queueB (fair), app4 is selected.
+ assertTrue("App2 should have container to be preempted",
+ !Collections.disjoint(
+ scheduler.getSchedulerApp(app2).getLiveContainers(),
+ scheduler.getSchedulerApp(app2).getPreemptionContainers()));
+ assertTrue("App4 should have container to be preempted",
+ !Collections.disjoint(
+ scheduler.getSchedulerApp(app4).getLiveContainers(),
+ scheduler.getSchedulerApp(app4).getPreemptionContainers()));
// Pretend 15 seconds have passed
clock.tick(15);
// Trigger a kill by insisting we want containers back
- scheduler.preemptResources(scheduler.getQueueManager().getLeafQueues(),
- Resources.createResource(2 * 1024));
+ scheduler.preemptResources(Resources.createResource(2 * 1024));
// At this point the containers should have been killed (since we are not simulating AM)
- assertEquals(0, scheduler.getSchedulerApp(app6).getLiveContainers().size());
- assertEquals(0, scheduler.getSchedulerApp(app3).getLiveContainers().size());
+ assertEquals(1, scheduler.getSchedulerApp(app2).getLiveContainers().size());
+ assertEquals(1, scheduler.getSchedulerApp(app4).getLiveContainers().size());
+ // Inside each app, containers are sorted according to their priorities.
+ // Containers with priority 4 are preempted for app2 and app4.
+ Set<RMContainer> set = new HashSet<RMContainer>();
+ for (RMContainer container :
+ scheduler.getSchedulerApp(app2).getLiveContainers()) {
+ if (container.getAllocatedPriority().getPriority() == 4) {
+ set.add(container);
+ }
+ }
+ for (RMContainer container :
+ scheduler.getSchedulerApp(app4).getLiveContainers()) {
+ if (container.getAllocatedPriority().getPriority() == 4) {
+ set.add(container);
+ }
+ }
+ assertTrue("Containers with priority=4 in app2 and app4 should be " +
+ "preempted.", set.isEmpty());
// Trigger a kill by insisting we want containers back
- scheduler.preemptResources(scheduler.getQueueManager().getLeafQueues(),
- Resources.createResource(2 * 1024));
+ scheduler.preemptResources(Resources.createResource(2 * 1024));
// Pretend 15 seconds have passed
clock.tick(15);
// We should be able to claw back another container from A and B each.
- // Make sure it is lowest priority container.
- scheduler.preemptResources(scheduler.getQueueManager().getLeafQueues(),
- Resources.createResource(2 * 1024));
-
- assertEquals(1, scheduler.getSchedulerApp(app1).getLiveContainers().size());
+ // For queueA (fifo), continue preempting from app2.
+ // For queueB (fair), even app4 has a lowest priority container with p=4, it
+ // still preempts from app3 as app3 is most over fair share.
+ scheduler.preemptResources(Resources.createResource(2 * 1024));
+
+ assertEquals(2, scheduler.getSchedulerApp(app1).getLiveContainers().size());
assertEquals(0, scheduler.getSchedulerApp(app2).getLiveContainers().size());
- assertEquals(0, scheduler.getSchedulerApp(app3).getLiveContainers().size());
+ assertEquals(1, scheduler.getSchedulerApp(app3).getLiveContainers().size());
assertEquals(1, scheduler.getSchedulerApp(app4).getLiveContainers().size());
- assertEquals(0, scheduler.getSchedulerApp(app5).getLiveContainers().size());
- assertEquals(0, scheduler.getSchedulerApp(app6).getLiveContainers().size());
// Now A and B are below fair share, so preemption shouldn't do anything
- scheduler.preemptResources(scheduler.getQueueManager().getLeafQueues(),
- Resources.createResource(2 * 1024));
- assertEquals(1, scheduler.getSchedulerApp(app1).getLiveContainers().size());
- assertEquals(0, scheduler.getSchedulerApp(app2).getLiveContainers().size());
- assertEquals(0, scheduler.getSchedulerApp(app3).getLiveContainers().size());
- assertEquals(1, scheduler.getSchedulerApp(app4).getLiveContainers().size());
- assertEquals(0, scheduler.getSchedulerApp(app5).getLiveContainers().size());
- assertEquals(0, scheduler.getSchedulerApp(app6).getLiveContainers().size());
+ scheduler.preemptResources(Resources.createResource(2 * 1024));
+ assertTrue("App1 should have no container to be preempted",
+ scheduler.getSchedulerApp(app1).getPreemptionContainers().isEmpty());
+ assertTrue("App2 should have no container to be preempted",
+ scheduler.getSchedulerApp(app2).getPreemptionContainers().isEmpty());
+ assertTrue("App3 should have no container to be preempted",
+ scheduler.getSchedulerApp(app3).getPreemptionContainers().isEmpty());
+ assertTrue("App4 should have no container to be preempted",
+ scheduler.getSchedulerApp(app4).getPreemptionContainers().isEmpty());
}
@Test (timeout = 5000)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerPreemption.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerPreemption.java
index b3ab299ea88ca..2098e1679b9e0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerPreemption.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerPreemption.java
@@ -35,10 +35,8 @@
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
-import java.util.Collection;
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
public class TestFairSchedulerPreemption extends FairSchedulerTestBase {
@@ -51,8 +49,7 @@ private static class StubbedFairScheduler extends FairScheduler {
public int lastPreemptMemory = -1;
@Override
- protected void preemptResources(
- Collection<FSLeafQueue> scheds, Resource toPreempt) {
+ protected void preemptResources(Resource toPreempt) {
lastPreemptMemory = toPreempt.getMemory();
}
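To summarize the shape of the new preemption path in the diff above: start at the root queue, at each level hand off to the child the queue's policy ranks as most over its share, and inside the chosen application take the least important container first (the test in the diff expects the numerically largest priority value to go first). A simplified, self-contained sketch of that walk; SchedulableSketch, AppSketch and QueueSketch are invented stand-ins for Schedulable, AppSchedulable and the FSQueue classes, and the "most over share" ordering stands in for the SchedulingPolicy comparators:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

interface SchedulableSketch {
    double overFairShare();        // stand-in for the policy comparator's ordering key
    Integer preemptContainer();    // returns the chosen "container" (just its priority here), or null
}

class AppSketch implements SchedulableSketch {
    final List<Integer> containerPriorities;
    AppSketch(Integer... priorities) { this.containerPriorities = new ArrayList<>(Arrays.asList(priorities)); }
    @Override public double overFairShare() { return containerPriorities.size(); }
    @Override public Integer preemptContainer() {
        // Mirror the diff's test expectation: the numerically largest priority goes first.
        return containerPriorities.isEmpty() ? null : Collections.max(containerPriorities);
    }
}

class QueueSketch implements SchedulableSketch {
    final List<SchedulableSketch> children = new ArrayList<>();
    @Override public double overFairShare() {
        return children.stream().mapToDouble(SchedulableSketch::overFairShare).sum();
    }
    @Override public Integer preemptContainer() {
        // At every level, delegate to the child that is most over its share.
        SchedulableSketch candidate = null;
        for (SchedulableSketch child : children) {
            if (candidate == null || child.overFairShare() > candidate.overFairShare()) {
                candidate = child;
            }
        }
        return candidate == null ? null : candidate.preemptContainer();
    }
}

public class PreemptionWalkSketch {
    public static void main(String[] args) {
        QueueSketch root = new QueueSketch();
        QueueSketch queueA = new QueueSketch();
        QueueSketch queueB = new QueueSketch();
        queueA.children.add(new AppSketch(1, 2, 3, 4));   // most over its share
        queueB.children.add(new AppSketch(1, 2));
        root.children.addAll(Arrays.asList(queueA, queueB));
        System.out.println("preempt container with priority " + root.preemptContainer()); // prints 4
    }
}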
|
20f43bf54c4cf1ccb37e69e06057ebc3cc9e1ae8
|
elasticsearch
|
add hasSingleArrayBackingStorage; allow for optimization only when there really is a single array, and not when there is a multi-dimensional one
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefOrdValComparator.java b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefOrdValComparator.java
index 0bbfbdb28358f..9afc929bf757c 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefOrdValComparator.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefOrdValComparator.java
@@ -387,12 +387,14 @@ public FieldComparator<BytesRef> setNextReader(AtomicReaderContext context) thro
Object ordsStorage = docToOrd.ordinals().getBackingStorage();
FieldComparator<BytesRef> perSegComp = null;
- if (ordsStorage instanceof byte[]) {
- perSegComp = new ByteOrdComparator((byte[]) ordsStorage, termsIndex, docBase);
- } else if (ordsStorage instanceof short[]) {
- perSegComp = new ShortOrdComparator((short[]) ordsStorage, termsIndex, docBase);
- } else if (ordsStorage instanceof int[]) {
- perSegComp = new IntOrdComparator((int[]) ordsStorage, termsIndex, docBase);
+ if (docToOrd.ordinals().hasSingleArrayBackingStorage()) {
+ if (ordsStorage instanceof byte[]) {
+ perSegComp = new ByteOrdComparator((byte[]) ordsStorage, termsIndex, docBase);
+ } else if (ordsStorage instanceof short[]) {
+ perSegComp = new ShortOrdComparator((short[]) ordsStorage, termsIndex, docBase);
+ } else if (ordsStorage instanceof int[]) {
+ perSegComp = new IntOrdComparator((int[]) ordsStorage, termsIndex, docBase);
+ }
}
// Don't specialize the long[] case since it's not
// possible, ie, worse case is MAX_INT-1 docs with
diff --git a/src/main/java/org/elasticsearch/index/fielddata/ordinals/DocIdOrdinals.java b/src/main/java/org/elasticsearch/index/fielddata/ordinals/DocIdOrdinals.java
index dfc9e68dcd63d..5e8669731b960 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/ordinals/DocIdOrdinals.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/ordinals/DocIdOrdinals.java
@@ -38,6 +38,11 @@ public DocIdOrdinals(int numDocs) {
this.numDocs = numDocs;
}
+ @Override
+ public boolean hasSingleArrayBackingStorage() {
+ return false;
+ }
+
@Override
public Object getBackingStorage() {
return null;
diff --git a/src/main/java/org/elasticsearch/index/fielddata/ordinals/EmptyOrdinals.java b/src/main/java/org/elasticsearch/index/fielddata/ordinals/EmptyOrdinals.java
index 5049fbd1c5292..b12b0a2a80526 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/ordinals/EmptyOrdinals.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/ordinals/EmptyOrdinals.java
@@ -36,6 +36,11 @@ public long getMemorySizeInBytes() {
return 0;
}
+ @Override
+ public boolean hasSingleArrayBackingStorage() {
+ return false;
+ }
+
@Override
public Object getBackingStorage() {
return null;
diff --git a/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiFlatArrayOrdinals.java b/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiFlatArrayOrdinals.java
index 11a2dac53f8ba..7e1eab2620841 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiFlatArrayOrdinals.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiFlatArrayOrdinals.java
@@ -50,6 +50,11 @@ public MultiFlatArrayOrdinals(int[][] ordinals, int numOrds) {
this.numOrds = numOrds;
}
+ @Override
+ public boolean hasSingleArrayBackingStorage() {
+ return false;
+ }
+
@Override
public Object getBackingStorage() {
return ordinals;
diff --git a/src/main/java/org/elasticsearch/index/fielddata/ordinals/Ordinals.java b/src/main/java/org/elasticsearch/index/fielddata/ordinals/Ordinals.java
index cdbdffeb93a54..60f29de7fb02d 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/ordinals/Ordinals.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/ordinals/Ordinals.java
@@ -19,7 +19,6 @@
package org.elasticsearch.index.fielddata.ordinals;
-import org.elasticsearch.common.RamUsage;
import org.elasticsearch.index.fielddata.util.IntArrayRef;
/**
@@ -27,6 +26,11 @@
*/
public interface Ordinals {
+ /**
+ * Are the ordinals backed by a single ordinals array?
+ */
+ boolean hasSingleArrayBackingStorage();
+
/**
* Returns the backing storage for this ordinals.
*/
diff --git a/src/main/java/org/elasticsearch/index/fielddata/ordinals/SingleArrayOrdinals.java b/src/main/java/org/elasticsearch/index/fielddata/ordinals/SingleArrayOrdinals.java
index 60870d513a039..f84c5482e3f87 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/ordinals/SingleArrayOrdinals.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/ordinals/SingleArrayOrdinals.java
@@ -37,6 +37,11 @@ public SingleArrayOrdinals(int[] ordinals, int numOrds) {
this.numOrds = numOrds;
}
+ @Override
+ public boolean hasSingleArrayBackingStorage() {
+ return true;
+ }
+
@Override
public Object getBackingStorage() {
return ordinals;
diff --git a/src/main/java/org/elasticsearch/index/fielddata/ordinals/SinglePackedOrdinals.java b/src/main/java/org/elasticsearch/index/fielddata/ordinals/SinglePackedOrdinals.java
index 7b46ec122dbb7..a79f29e7e7d5d 100644
--- a/src/main/java/org/elasticsearch/index/fielddata/ordinals/SinglePackedOrdinals.java
+++ b/src/main/java/org/elasticsearch/index/fielddata/ordinals/SinglePackedOrdinals.java
@@ -38,6 +38,11 @@ public SinglePackedOrdinals(PackedInts.Reader reader, int numOrds) {
this.numOrds = numOrds;
}
+ @Override
+ public boolean hasSingleArrayBackingStorage() {
+ return reader.hasArray();
+ }
+
@Override
public Object getBackingStorage() {
if (reader.hasArray()) {
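Putting the pieces of the diff above together: the comparator only takes the specialized single-array path when the ordinals implementation says its backing store really is one flat array. A small self-contained illustration of that guard; the OrdinalsSketch interface and its implementations are trimmed stand-ins for the real Elasticsearch classes:

interface OrdinalsSketch {
    boolean hasSingleArrayBackingStorage();
    Object getBackingStorage();
}

class SingleArraySketch implements OrdinalsSketch {
    private final int[] ords;
    SingleArraySketch(int[] ords) { this.ords = ords; }
    @Override public boolean hasSingleArrayBackingStorage() { return true; }
    @Override public Object getBackingStorage() { return ords; }
}

class MultiArraySketch implements OrdinalsSketch {
    private final int[][] ords;
    MultiArraySketch(int[][] ords) { this.ords = ords; }
    @Override public boolean hasSingleArrayBackingStorage() { return false; }
    @Override public Object getBackingStorage() { return ords; }
}

public class OrdinalsFastPathSketch {
    static String choosePath(OrdinalsSketch ordinals) {
        Object storage = ordinals.getBackingStorage();
        // The patch gates the specialized per-segment comparators behind the new flag,
        // so a multi-valued int[][] store (or a packed reader without a real array)
        // always falls back to the generic ordinal comparator.
        if (ordinals.hasSingleArrayBackingStorage() && storage instanceof int[]) {
            return "specialized int[] comparator";
        }
        return "generic ordinal comparator";
    }

    public static void main(String[] args) {
        System.out.println(choosePath(new SingleArraySketch(new int[]{1, 2, 3})));
        System.out.println(choosePath(new MultiArraySketch(new int[][]{{1}, {2, 3}})));
    }
}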
|
041af28166c270b29b51f5f42fb3269c2dbe1159
|
kotlin
|
Deprecate and don't write KotlinClass$Kind, to be removed later
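Judging from the diff below, local and anonymous classes stop carrying a kind field on @KotlinClass and instead get a separate kotlin.jvm.internal.KotlinLocalClass marker annotation. A hedged sketch of how a consumer could detect that marker reflectively, mirroring the updated AbstractLocalClassProtoTest; the annotation name is taken from the diff, and the command-line argument is purely illustrative:

import java.lang.annotation.Annotation;

public class LocalClassMarkerCheck {
    static boolean isKotlinLocalClass(Class<?> clazz) throws ClassNotFoundException {
        @SuppressWarnings("unchecked")
        Class<? extends Annotation> marker = (Class<? extends Annotation>)
                clazz.getClassLoader().loadClass("kotlin.jvm.internal.KotlinLocalClass");
        return clazz.isAnnotationPresent(marker);
    }

    public static void main(String[] args) throws Exception {
        // Expects the fully-qualified name of a class compiled by a matching Kotlin compiler.
        System.out.println(isKotlinLocalClass(Class.forName(args[0])));
    }
}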
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java b/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java
index 8106ba4385088..89662512950c8 100644
--- a/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java
+++ b/compiler/backend/src/org/jetbrains/kotlin/codegen/ImplementationBodyCodegen.java
@@ -43,7 +43,6 @@
import org.jetbrains.kotlin.lexer.JetTokens;
import org.jetbrains.kotlin.load.java.JvmAbi;
import org.jetbrains.kotlin.load.java.JvmAnnotationNames;
-import org.jetbrains.kotlin.load.java.JvmAnnotationNames.KotlinClass;
import org.jetbrains.kotlin.load.java.descriptors.JavaCallableMemberDescriptor;
import org.jetbrains.kotlin.name.FqName;
import org.jetbrains.kotlin.name.FqNameUnsafe;
@@ -256,17 +255,12 @@ protected void generateBody() {
protected void generateKotlinAnnotation() {
if (state.getClassBuilderMode() != ClassBuilderMode.FULL) return;
- KotlinClass.Kind kind;
- if (isAnonymousObject(descriptor)) {
- kind = KotlinClass.Kind.ANONYMOUS_OBJECT;
- }
- else if (isTopLevelOrInnerClass(descriptor)) {
- // Default value is Kind.CLASS
- kind = null;
- }
- else {
- // LOCAL_CLASS is also written to inner classes of local classes
- kind = KotlinClass.Kind.LOCAL_CLASS;
+ if (!isTopLevelOrInnerClass(descriptor)) {
+ AnnotationVisitor av = v.getVisitor().visitAnnotation(
+ asmDescByFqNameWithoutInnerClasses(JvmAnnotationNames.KOTLIN_LOCAL_CLASS), true
+ );
+ av.visit(JvmAnnotationNames.VERSION_FIELD_NAME, JvmAbi.VERSION.toArray());
+ av.visitEnd();
}
DescriptorSerializer serializer =
@@ -276,13 +270,6 @@ else if (isTopLevelOrInnerClass(descriptor)) {
AnnotationVisitor av = v.getVisitor().visitAnnotation(asmDescByFqNameWithoutInnerClasses(JvmAnnotationNames.KOTLIN_CLASS), true);
writeAnnotationData(av, serializer, classProto);
- if (kind != null) {
- av.visitEnum(
- JvmAnnotationNames.KIND_FIELD_NAME,
- Type.getObjectType(KotlinClass.KIND_INTERNAL_NAME).getDescriptor(),
- kind.toString()
- );
- }
av.visitEnd();
}
diff --git a/compiler/frontend.java/src/org/jetbrains/kotlin/load/kotlin/FileBasedKotlinClass.java b/compiler/frontend.java/src/org/jetbrains/kotlin/load/kotlin/FileBasedKotlinClass.java
index 67a9529da12fd..f59d77e8bf604 100644
--- a/compiler/frontend.java/src/org/jetbrains/kotlin/load/kotlin/FileBasedKotlinClass.java
+++ b/compiler/frontend.java/src/org/jetbrains/kotlin/load/kotlin/FileBasedKotlinClass.java
@@ -270,9 +270,6 @@ private static ClassId resolveNameByInternalName(@NotNull String name, @NotNull
if (name.equals(JvmAnnotationNames.KotlinSyntheticClass.KIND_INTERNAL_NAME)) {
return JvmAnnotationNames.KotlinSyntheticClass.KIND_CLASS_ID;
}
- else if (name.equals(JvmAnnotationNames.KotlinClass.KIND_INTERNAL_NAME)) {
- return JvmAnnotationNames.KotlinClass.KIND_CLASS_ID;
- }
List<String> classes = new ArrayList<String>(1);
boolean local = false;
diff --git a/compiler/testData/codegen/bytecodeListing/annotations/literals.txt b/compiler/testData/codegen/bytecodeListing/annotations/literals.txt
index 7ceb93edb710b..6666962805c18 100644
--- a/compiler/testData/codegen/bytecodeListing/annotations/literals.txt
+++ b/compiler/testData/codegen/bytecodeListing/annotations/literals.txt
@@ -20,7 +20,7 @@
method <init>(p0: int): void
}
[email protected] LiteralsKt$foo$3 {
[email protected] @kotlin.jvm.internal.KotlinClass LiteralsKt$foo$3 {
inner class LiteralsKt$foo$3
field $kotlinClass: kotlin.reflect.KClass
method <clinit>(): void
diff --git a/compiler/tests/org/jetbrains/kotlin/codegen/InlineTestUtil.kt b/compiler/tests/org/jetbrains/kotlin/codegen/InlineTestUtil.kt
index 3c09b0e4d375d..ccb23e253a03a 100644
--- a/compiler/tests/org/jetbrains/kotlin/codegen/InlineTestUtil.kt
+++ b/compiler/tests/org/jetbrains/kotlin/codegen/InlineTestUtil.kt
@@ -185,7 +185,7 @@ public object InlineTestUtil {
}
private fun isClassOrPackagePartKind(header: KotlinClassHeader): Boolean {
- return header.classKind == JvmAnnotationNames.KotlinClass.Kind.CLASS || header.isInterfaceDefaultImpls
+ return (header.kind == KotlinClassHeader.Kind.CLASS && !header.isLocalClass) || header.isInterfaceDefaultImpls
}
private fun getClassHeader(file: OutputFile): KotlinClassHeader {
diff --git a/compiler/tests/org/jetbrains/kotlin/codegen/KotlinSyntheticClassAnnotationTest.java b/compiler/tests/org/jetbrains/kotlin/codegen/KotlinSyntheticClassAnnotationTest.java
index c02e8d2ff2afd..9ec16af4125cf 100644
--- a/compiler/tests/org/jetbrains/kotlin/codegen/KotlinSyntheticClassAnnotationTest.java
+++ b/compiler/tests/org/jetbrains/kotlin/codegen/KotlinSyntheticClassAnnotationTest.java
@@ -19,14 +19,12 @@
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
import org.jetbrains.kotlin.backend.common.output.OutputFile;
import org.jetbrains.kotlin.load.java.AbiVersionUtil;
import org.jetbrains.kotlin.load.java.JvmAbi;
-import org.jetbrains.kotlin.load.java.JvmAnnotationNames.KotlinClass;
+import org.jetbrains.kotlin.load.java.JvmAnnotationNames;
import org.jetbrains.kotlin.load.java.JvmAnnotationNames.KotlinSyntheticClass;
import org.jetbrains.kotlin.name.FqName;
-import org.jetbrains.kotlin.resolve.jvm.JvmClassName;
import org.jetbrains.kotlin.serialization.deserialization.BinaryVersion;
import org.jetbrains.kotlin.test.ConfigurationKind;
@@ -34,9 +32,6 @@
import java.util.Collection;
import java.util.List;
-import static org.jetbrains.kotlin.load.java.JvmAnnotationNames.KIND_FIELD_NAME;
-import static org.jetbrains.kotlin.load.java.JvmAnnotationNames.KotlinClass.Kind.ANONYMOUS_OBJECT;
-import static org.jetbrains.kotlin.load.java.JvmAnnotationNames.KotlinClass.Kind.LOCAL_CLASS;
import static org.jetbrains.kotlin.load.java.JvmAnnotationNames.VERSION_FIELD_NAME;
public class KotlinSyntheticClassAnnotationTest extends CodegenTestCase {
@@ -93,8 +88,7 @@ public void testAnonymousFunction() {
public void testLocalClass() {
doTestKotlinClass(
"fun foo() { class Local }",
- "Local",
- LOCAL_CLASS
+ "Local"
);
}
@@ -108,24 +102,21 @@ public void testLocalTraitImpl() {
public void testLocalTraitInterface() {
doTestKotlinClass(
"fun foo() { interface Local { fun bar() = 42 } }",
- "Local.class",
- LOCAL_CLASS
+ "Local.class"
);
}
public void testInnerClassOfLocalClass() {
doTestKotlinClass(
"fun foo() { class Local { inner class Inner } }",
- "Inner",
- LOCAL_CLASS
+ "Inner"
);
}
public void testAnonymousObject() {
doTestKotlinClass(
"val o = object {}",
- "$1",
- ANONYMOUS_OBJECT
+ "$1"
);
}
@@ -138,22 +129,17 @@ public void testWhenMappings() {
}
private void doTestKotlinSyntheticClass(@NotNull String code, @NotNull String classFilePart) {
- doTest(code, classFilePart, KotlinSyntheticClass.CLASS_NAME, null);
+ doTest(code, classFilePart, KotlinSyntheticClass.CLASS_NAME.getFqNameForClassNameWithoutDollars());
}
- private void doTestKotlinClass(
- @NotNull String code,
- @NotNull String classFilePart,
- @NotNull KotlinClass.Kind expectedKind
- ) {
- doTest(code, classFilePart, KotlinClass.CLASS_NAME, expectedKind.toString());
+ private void doTestKotlinClass(@NotNull String code, @NotNull String classFilePart) {
+ doTest(code, classFilePart, JvmAnnotationNames.KOTLIN_CLASS, JvmAnnotationNames.KOTLIN_LOCAL_CLASS);
}
private void doTest(
@NotNull String code,
@NotNull final String classFilePart,
- @NotNull JvmClassName annotationName,
- @Nullable String expectedKind
+ @NotNull FqName... annotationFqNames
) {
loadText("package " + PACKAGE_NAME + "\n\n" + code);
List<OutputFile> output = generateClassesInFile().asList();
@@ -169,14 +155,12 @@ public boolean apply(OutputFile file) {
String path = files.iterator().next().getRelativePath();
String fqName = path.substring(0, path.length() - ".class".length()).replace('/', '.');
Class<?> aClass = generateClass(fqName);
- assertAnnotatedWithKind(aClass, annotationName.getFqNameForClassNameWithoutDollars().asString(), expectedKind);
+ for (FqName annotationFqName : annotationFqNames) {
+ assertAnnotatedWith(aClass, annotationFqName.asString());
+ }
}
- private void assertAnnotatedWithKind(
- @NotNull Class<?> aClass,
- @NotNull String annotationFqName,
- @Nullable String expectedKind
- ) {
+ private void assertAnnotatedWith(@NotNull Class<?> aClass, @NotNull String annotationFqName) {
Class<? extends Annotation> annotationClass = loadAnnotationClassQuietly(annotationFqName);
assertTrue("No annotation " + annotationFqName + " found in " + aClass, aClass.isAnnotationPresent(annotationClass));
@@ -186,9 +170,5 @@ private void assertAnnotatedWithKind(
assertNotNull(version);
assertTrue("Annotation " + annotationFqName + " is written with an unsupported format",
AbiVersionUtil.isAbiVersionCompatible(BinaryVersion.create(version)));
-
- Object actualKind = CodegenTestUtil.getAnnotationAttribute(annotation, KIND_FIELD_NAME);
- assertNotNull(actualKind);
- assertEquals("Annotation " + annotationFqName + " has the wrong kind", expectedKind, actualKind.toString());
}
}
diff --git a/compiler/tests/org/jetbrains/kotlin/jvm/runtime/AbstractJvmRuntimeDescriptorLoaderTest.kt b/compiler/tests/org/jetbrains/kotlin/jvm/runtime/AbstractJvmRuntimeDescriptorLoaderTest.kt
index 44836798c9779..21dc941dda869 100644
--- a/compiler/tests/org/jetbrains/kotlin/jvm/runtime/AbstractJvmRuntimeDescriptorLoaderTest.kt
+++ b/compiler/tests/org/jetbrains/kotlin/jvm/runtime/AbstractJvmRuntimeDescriptorLoaderTest.kt
@@ -150,11 +150,10 @@ public abstract class AbstractJvmRuntimeDescriptorLoaderTest : TestCaseWithTmpdi
val packageView = module.getPackage(LoadDescriptorUtil.TEST_PACKAGE_FQNAME)
packageScopes.add(packageView.memberScope)
}
- else if (header == null ||
- (header.kind == KotlinClassHeader.Kind.CLASS && header.classKind == JvmAnnotationNames.KotlinClass.Kind.CLASS)) {
+ else if (header == null || (header.kind == KotlinClassHeader.Kind.CLASS && !header.isLocalClass)) {
// Either a normal Kotlin class or a Java class
val classId = klass.classId
- if (!classId.isLocal()) {
+ if (!classId.isLocal) {
val classDescriptor = module.findClassAcrossModuleDependencies(classId).sure { "Couldn't resolve class $className" }
if (DescriptorUtils.isTopLevelDeclaration(classDescriptor)) {
classes.add(classDescriptor)
diff --git a/compiler/tests/org/jetbrains/kotlin/serialization/AbstractLocalClassProtoTest.kt b/compiler/tests/org/jetbrains/kotlin/serialization/AbstractLocalClassProtoTest.kt
index fbd1246cba2b0..f1899b6a4fc20 100644
--- a/compiler/tests/org/jetbrains/kotlin/serialization/AbstractLocalClassProtoTest.kt
+++ b/compiler/tests/org/jetbrains/kotlin/serialization/AbstractLocalClassProtoTest.kt
@@ -76,17 +76,13 @@ public abstract class AbstractLocalClassProtoTest : TestCaseWithTmpdir() {
)
}
+ @Suppress("UNCHECKED_CAST")
private fun assertHasAnnotationData(clazz: Class<*>) {
- @Suppress("UNCHECKED_CAST")
- val annotation = clazz.getAnnotation(
+ checkNotNull(clazz.getAnnotation(
clazz.classLoader.loadClass(JvmAnnotationNames.KOTLIN_CLASS.asString()) as Class<Annotation>
- )
- assert(annotation != null) { "KotlinClass annotation is not found for class $clazz" }
-
- val kindMethod = annotation.annotationType().getDeclaredMethod("kind")
- val kind = kindMethod(annotation)
- assert(kind.toString() != JvmAnnotationNames.KotlinClass.Kind.CLASS.toString()) {
- "'kind' should not be CLASS: $clazz (was $kind)"
- }
+ )) { "KotlinClass annotation is not found for class $clazz" }
+ checkNotNull(clazz.getAnnotation(
+ clazz.classLoader.loadClass(JvmAnnotationNames.KOTLIN_LOCAL_CLASS.asString()) as Class<Annotation>
+ )) { "KotlinLocalClass annotation is not found for class $clazz" }
}
}
diff --git a/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/java/JvmAnnotationNames.java b/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/java/JvmAnnotationNames.java
index c690ccb0d1496..4c28d1f533236 100644
--- a/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/java/JvmAnnotationNames.java
+++ b/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/java/JvmAnnotationNames.java
@@ -28,13 +28,14 @@
import java.util.Set;
public final class JvmAnnotationNames {
- public static final FqName KOTLIN_CLASS = KotlinClass.CLASS_NAME.getFqNameForClassNameWithoutDollars();
+ public static final FqName KOTLIN_CLASS = new FqName("kotlin.jvm.internal.KotlinClass");
public static final FqName KOTLIN_PACKAGE = new FqName("kotlin.jvm.internal.KotlinPackage");
public static final FqName KOTLIN_FILE_FACADE = new FqName("kotlin.jvm.internal.KotlinFileFacade");
public static final FqName KOTLIN_MULTIFILE_CLASS = new FqName("kotlin.jvm.internal.KotlinMultifileClass");
public static final FqName KOTLIN_MULTIFILE_CLASS_PART = new FqName("kotlin.jvm.internal.KotlinMultifileClassPart");
public static final FqName KOTLIN_CALLABLE = new FqName("kotlin.jvm.internal.KotlinCallable");
public static final FqName KOTLIN_INTERFACE_DEFAULT_IMPLS = new FqName("kotlin.jvm.internal.KotlinInterfaceDefaultImpls");
+ public static final FqName KOTLIN_LOCAL_CLASS = new FqName("kotlin.jvm.internal.KotlinLocalClass");
public static final FqName JAVA_LANG_DEPRECATED = new FqName("java.lang.Deprecated");
@@ -68,23 +69,6 @@ public final class JvmAnnotationNames {
public static final FqName ENHANCED_NULLABILITY_ANNOTATION = new FqName("kotlin.jvm.internal.EnhancedNullability");
public static final FqName ENHANCED_MUTABILITY_ANNOTATION = new FqName("kotlin.jvm.internal.EnhancedMutability");
- public static class KotlinClass {
- public static final JvmClassName CLASS_NAME = JvmClassName.byInternalName("kotlin/jvm/internal/KotlinClass");
- public static final ClassId KIND_CLASS_ID =
- ClassId.topLevel(CLASS_NAME.getFqNameForClassNameWithoutDollars()).createNestedClassId(Name.identifier("Kind"));
- public static final String KIND_INTERNAL_NAME = JvmClassName.byClassId(KIND_CLASS_ID).getInternalName();
-
- /**
- * This enum duplicates {@link kotlin.jvm.internal.KotlinClass.Kind}. Both places should be updated simultaneously.
- */
- public enum Kind {
- CLASS,
- LOCAL_CLASS,
- ANONYMOUS_OBJECT,
- ;
- }
- }
-
public static class KotlinSyntheticClass {
public static final JvmClassName CLASS_NAME = JvmClassName.byInternalName("kotlin/jvm/internal/KotlinSyntheticClass");
public static final ClassId KIND_CLASS_ID =
@@ -125,6 +109,7 @@ public static class KotlinSyntheticClass {
}
SPECIAL_ANNOTATIONS.add(KotlinSyntheticClass.CLASS_NAME);
SPECIAL_ANNOTATIONS.add(JvmClassName.byFqNameWithoutInnerClasses(KOTLIN_INTERFACE_DEFAULT_IMPLS));
+ SPECIAL_ANNOTATIONS.add(JvmClassName.byFqNameWithoutInnerClasses(KOTLIN_LOCAL_CLASS));
for (FqName fqName : Arrays.asList(JETBRAINS_NOT_NULL_ANNOTATION, JETBRAINS_NULLABLE_ANNOTATION)) {
NULLABILITY_ANNOTATIONS.add(JvmClassName.byFqNameWithoutInnerClasses(fqName));
diff --git a/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/kotlin/header/KotlinClassHeader.kt b/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/kotlin/header/KotlinClassHeader.kt
index f12bc55a7a6fc..dadad589ede50 100644
--- a/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/kotlin/header/KotlinClassHeader.kt
+++ b/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/kotlin/header/KotlinClassHeader.kt
@@ -17,24 +17,22 @@
package org.jetbrains.kotlin.load.kotlin.header
import org.jetbrains.kotlin.load.java.AbiVersionUtil
-import org.jetbrains.kotlin.load.java.JvmAnnotationNames.KotlinClass
-import org.jetbrains.kotlin.load.java.JvmAnnotationNames.KotlinSyntheticClass
import org.jetbrains.kotlin.serialization.deserialization.BinaryVersion
-public class KotlinClassHeader(
- public val kind: KotlinClassHeader.Kind,
- public val version: BinaryVersion,
- public val annotationData: Array<String>?,
- public val strings: Array<String>?,
- public val classKind: KotlinClass.Kind?,
- public val syntheticClassKind: String?,
- public val filePartClassNames: Array<String>?,
- public val multifileClassName: String?,
- public val isInterfaceDefaultImpls: Boolean
+class KotlinClassHeader(
+ val kind: KotlinClassHeader.Kind,
+ val version: BinaryVersion,
+ val annotationData: Array<String>?,
+ val strings: Array<String>?,
+ val syntheticClassKind: String?,
+ val filePartClassNames: Array<String>?,
+ val multifileClassName: String?,
+ val isInterfaceDefaultImpls: Boolean,
+ val isLocalClass: Boolean
) {
- public val isCompatibleAbiVersion: Boolean get() = AbiVersionUtil.isAbiVersionCompatible(version)
+ val isCompatibleAbiVersion: Boolean get() = AbiVersionUtil.isAbiVersionCompatible(version)
- public enum class Kind {
+ enum class Kind {
CLASS,
PACKAGE_FACADE,
FILE_FACADE,
@@ -45,13 +43,13 @@ public class KotlinClassHeader(
override fun toString() =
"$kind " +
- (if (classKind != null) "$classKind " else "") +
+ (if (isLocalClass) "(local) " else "") +
(if (syntheticClassKind != null) "$syntheticClassKind " else "") +
"version=$version"
}
-public fun KotlinClassHeader.isCompatibleClassKind(): Boolean = isCompatibleAbiVersion && kind == KotlinClassHeader.Kind.CLASS
-public fun KotlinClassHeader.isCompatiblePackageFacadeKind(): Boolean = isCompatibleAbiVersion && kind == KotlinClassHeader.Kind.PACKAGE_FACADE
-public fun KotlinClassHeader.isCompatibleFileFacadeKind(): Boolean = isCompatibleAbiVersion && kind == KotlinClassHeader.Kind.FILE_FACADE
-public fun KotlinClassHeader.isCompatibleMultifileClassKind(): Boolean = isCompatibleAbiVersion && kind == KotlinClassHeader.Kind.MULTIFILE_CLASS
-public fun KotlinClassHeader.isCompatibleMultifileClassPartKind(): Boolean = isCompatibleAbiVersion && kind == KotlinClassHeader.Kind.MULTIFILE_CLASS_PART
+fun KotlinClassHeader.isCompatibleClassKind(): Boolean = isCompatibleAbiVersion && kind == KotlinClassHeader.Kind.CLASS
+fun KotlinClassHeader.isCompatiblePackageFacadeKind(): Boolean = isCompatibleAbiVersion && kind == KotlinClassHeader.Kind.PACKAGE_FACADE
+fun KotlinClassHeader.isCompatibleFileFacadeKind(): Boolean = isCompatibleAbiVersion && kind == KotlinClassHeader.Kind.FILE_FACADE
+fun KotlinClassHeader.isCompatibleMultifileClassKind(): Boolean = isCompatibleAbiVersion && kind == KotlinClassHeader.Kind.MULTIFILE_CLASS
+fun KotlinClassHeader.isCompatibleMultifileClassPartKind(): Boolean = isCompatibleAbiVersion && kind == KotlinClassHeader.Kind.MULTIFILE_CLASS_PART
diff --git a/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/kotlin/header/ReadKotlinClassHeaderAnnotationVisitor.java b/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/kotlin/header/ReadKotlinClassHeaderAnnotationVisitor.java
index ecbb2d71effe9..384d96eafee46 100644
--- a/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/kotlin/header/ReadKotlinClassHeaderAnnotationVisitor.java
+++ b/core/descriptor.loader.java/src/org/jetbrains/kotlin/load/kotlin/header/ReadKotlinClassHeaderAnnotationVisitor.java
@@ -21,6 +21,7 @@
import org.jetbrains.kotlin.descriptors.SourceElement;
import org.jetbrains.kotlin.load.java.AbiVersionUtil;
import org.jetbrains.kotlin.name.ClassId;
+import org.jetbrains.kotlin.name.FqName;
import org.jetbrains.kotlin.name.Name;
import org.jetbrains.kotlin.resolve.jvm.JvmClassName;
import org.jetbrains.kotlin.serialization.deserialization.BinaryVersion;
@@ -39,7 +40,7 @@ public class ReadKotlinClassHeaderAnnotationVisitor implements AnnotationVisitor
private static final Map<JvmClassName, KotlinClassHeader.Kind> OLD_DEPRECATED_ANNOTATIONS_KINDS = new HashMap<JvmClassName, KotlinClassHeader.Kind>();
static {
- HEADER_KINDS.put(KotlinClass.CLASS_NAME, CLASS);
+ HEADER_KINDS.put(JvmClassName.byFqNameWithoutInnerClasses(KOTLIN_CLASS), CLASS);
HEADER_KINDS.put(JvmClassName.byFqNameWithoutInnerClasses(KOTLIN_PACKAGE), PACKAGE_FACADE);
HEADER_KINDS.put(JvmClassName.byFqNameWithoutInnerClasses(KOTLIN_FILE_FACADE), FILE_FACADE);
HEADER_KINDS.put(JvmClassName.byFqNameWithoutInnerClasses(KOTLIN_MULTIFILE_CLASS), MULTIFILE_CLASS);
@@ -66,9 +67,9 @@ private static void initOldAnnotations() {
private String[] annotationData = null;
private String[] strings = null;
private KotlinClassHeader.Kind headerKind = null;
- private KotlinClass.Kind classKind = null;
private String syntheticClassKind = null;
private boolean isInterfaceDefaultImpls = false;
+ private boolean isLocalClass = false;
@Nullable
public KotlinClassHeader createHeader() {
@@ -76,11 +77,6 @@ public KotlinClassHeader createHeader() {
return null;
}
- if (headerKind == CLASS && classKind == null) {
- // Default class kind is Kind.CLASS
- classKind = KotlinClass.Kind.CLASS;
- }
-
if (!AbiVersionUtil.isAbiVersionCompatible(version)) {
annotationData = null;
}
@@ -91,8 +87,8 @@ else if (shouldHaveData() && annotationData == null) {
}
return new KotlinClassHeader(
- headerKind, version, annotationData, strings, classKind, syntheticClassKind, filePartClassNames, multifileClassName,
- isInterfaceDefaultImpls
+ headerKind, version, annotationData, strings, syntheticClassKind, filePartClassNames, multifileClassName,
+ isInterfaceDefaultImpls, isLocalClass
);
}
@@ -106,10 +102,15 @@ private boolean shouldHaveData() {
@Nullable
@Override
public AnnotationArgumentVisitor visitAnnotation(@NotNull ClassId classId, @NotNull SourceElement source) {
- if (KOTLIN_INTERFACE_DEFAULT_IMPLS.equals(classId.asSingleFqName())) {
+ FqName fqName = classId.asSingleFqName();
+ if (KOTLIN_INTERFACE_DEFAULT_IMPLS.equals(fqName)) {
isInterfaceDefaultImpls = true;
return null;
}
+ else if (KOTLIN_LOCAL_CLASS.equals(fqName)) {
+ isLocalClass = true;
+ return null;
+ }
if (headerKind != null) {
// Ignore all Kotlin annotations except the first found
@@ -269,14 +270,7 @@ public void visitEnd() {
private class ClassHeaderReader extends HeaderAnnotationArgumentVisitor {
public ClassHeaderReader() {
- super(KotlinClass.CLASS_NAME);
- }
-
- @Override
- public void visitEnum(@NotNull Name name, @NotNull ClassId enumClassId, @NotNull Name enumEntryName) {
- if (KotlinClass.KIND_CLASS_ID.equals(enumClassId) && KIND_FIELD_NAME.equals(name.asString())) {
- classKind = valueOfOrNull(KotlinClass.Kind.class, enumEntryName.asString());
- }
+ super(JvmClassName.byFqNameWithoutInnerClasses(KOTLIN_CLASS));
}
}
@@ -316,16 +310,4 @@ public void visitEnum(@NotNull Name name, @NotNull ClassId enumClassId, @NotNull
}
}
}
-
- // This function is needed here because Enum.valueOf() throws exception if there's no such value,
- // but we don't want to fail if we're loading the header with an _incompatible_ ABI version
- @Nullable
- private static <E extends Enum<E>> E valueOfOrNull(@NotNull Class<E> enumClass, @NotNull String entry) {
- try {
- return Enum.valueOf(enumClass, entry);
- }
- catch (IllegalArgumentException e) {
- return null;
- }
- }
}
diff --git a/core/runtime.jvm/src/kotlin/jvm/internal/KotlinClass.java b/core/runtime.jvm/src/kotlin/jvm/internal/KotlinClass.java
index 77270d3e02bad..b54eaa1db1586 100644
--- a/core/runtime.jvm/src/kotlin/jvm/internal/KotlinClass.java
+++ b/core/runtime.jvm/src/kotlin/jvm/internal/KotlinClass.java
@@ -29,12 +29,14 @@
int[] version() default {};
- Kind kind() default Kind.CLASS;
-
String[] data();
String[] strings();
+ @Deprecated
+ Kind kind() default Kind.CLASS;
+
+ @Deprecated
enum Kind {
CLASS,
diff --git a/core/runtime.jvm/src/kotlin/jvm/internal/KotlinLocalClass.java b/core/runtime.jvm/src/kotlin/jvm/internal/KotlinLocalClass.java
new file mode 100644
index 0000000000000..8086c610a9fa3
--- /dev/null
+++ b/core/runtime.jvm/src/kotlin/jvm/internal/KotlinLocalClass.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2010-2015 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package kotlin.jvm.internal;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface KotlinLocalClass {
+ int[] version() default {};
+}
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/DecompiledUtils.kt b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/DecompiledUtils.kt
index 091bf3d523067..ef6644f7dae19 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/DecompiledUtils.kt
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/DecompiledUtils.kt
@@ -21,7 +21,6 @@ import com.intellij.openapi.vfs.VirtualFile
import com.intellij.psi.ClassFileViewProvider
import org.jetbrains.kotlin.idea.caches.JarUserDataManager
import org.jetbrains.kotlin.idea.decompiler.textBuilder.DirectoryBasedClassFinder
-import org.jetbrains.kotlin.load.java.JvmAnnotationNames.KotlinClass
import org.jetbrains.kotlin.load.kotlin.KotlinBinaryClassCache
import org.jetbrains.kotlin.load.kotlin.KotlinJvmBinaryClass
import org.jetbrains.kotlin.load.kotlin.header.KotlinClassHeader
@@ -71,9 +70,8 @@ public fun isKotlinInternalCompiledFile(file: VirtualFile): Boolean {
val header = KotlinBinaryClassCache.getKotlinBinaryClass(file)?.classHeader ?: return false
return header.kind == KotlinClassHeader.Kind.SYNTHETIC_CLASS ||
- (header.kind == KotlinClassHeader.Kind.CLASS && header.classKind != null && header.classKind != KotlinClass.Kind.CLASS) ||
header.kind == KotlinClassHeader.Kind.MULTIFILE_CLASS_PART ||
- header.syntheticClassKind == "PACKAGE_PART"
+ header.isLocalClass || header.syntheticClassKind == "PACKAGE_PART"
}
public fun isKotlinJavaScriptInternalCompiledFile(file: VirtualFile): Boolean =
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/stubBuilder/KotlinClsStubBuilder.kt b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/stubBuilder/KotlinClsStubBuilder.kt
index 997e8179139b3..fc5cb8d7fd70a 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/stubBuilder/KotlinClsStubBuilder.kt
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/decompiler/stubBuilder/KotlinClsStubBuilder.kt
@@ -27,7 +27,6 @@ import org.jetbrains.kotlin.idea.decompiler.isKotlinInternalCompiledFile
import org.jetbrains.kotlin.idea.decompiler.textBuilder.DirectoryBasedClassFinder
import org.jetbrains.kotlin.idea.decompiler.textBuilder.DirectoryBasedDataFinder
import org.jetbrains.kotlin.idea.decompiler.textBuilder.LoggingErrorReporter
-import org.jetbrains.kotlin.load.java.JvmAnnotationNames
import org.jetbrains.kotlin.load.kotlin.KotlinBinaryClassCache
import org.jetbrains.kotlin.load.kotlin.header.isCompatibleClassKind
import org.jetbrains.kotlin.load.kotlin.header.isCompatibleFileFacadeKind
@@ -82,7 +81,7 @@ public open class KotlinClsStubBuilder : ClsStubBuilder() {
createPackageFacadeStub(packageProto, packageFqName, context)
}
header.isCompatibleClassKind() -> {
- if (header.classKind != JvmAnnotationNames.KotlinClass.Kind.CLASS) return null
+ if (header.isLocalClass) return null
val (nameResolver, classProto) = JvmProtoBufUtil.readClassDataFrom(annotationData, strings)
val context = components.createContext(nameResolver, packageFqName)
createTopLevelClassStub(classId, classProto, context)
diff --git a/idea/tests/org/jetbrains/kotlin/idea/decompiler/AbstractInternalCompiledClassesTest.kt b/idea/tests/org/jetbrains/kotlin/idea/decompiler/AbstractInternalCompiledClassesTest.kt
index 974a0a30eaa60..ffae3ec46d1fe 100644
--- a/idea/tests/org/jetbrains/kotlin/idea/decompiler/AbstractInternalCompiledClassesTest.kt
+++ b/idea/tests/org/jetbrains/kotlin/idea/decompiler/AbstractInternalCompiledClassesTest.kt
@@ -20,7 +20,6 @@ import com.intellij.openapi.vfs.VirtualFile
import com.intellij.psi.PsiManager
import org.jetbrains.kotlin.idea.decompiler.navigation.NavigateToDecompiledLibraryTest
import org.jetbrains.kotlin.idea.test.JetLightCodeInsightFixtureTestCase
-import org.jetbrains.kotlin.load.java.JvmAnnotationNames.KotlinClass
import org.jetbrains.kotlin.load.kotlin.KotlinBinaryClassCache
import org.jetbrains.kotlin.load.kotlin.header.KotlinClassHeader
import org.junit.Assert
@@ -34,11 +33,8 @@ public abstract class AbstractInternalCompiledClassesTest : JetLightCodeInsightF
protected fun isSyntheticClass(): VirtualFile.() -> Boolean =
isFileWithHeader { it.kind == KotlinClassHeader.Kind.SYNTHETIC_CLASS }
- private fun isClassOfKind(kind: KotlinClass.Kind): VirtualFile.() -> Boolean =
- isFileWithHeader { it.classKind == kind }
-
- protected fun doTestNoPsiFilesAreBuiltForLocalClass(kind: KotlinClass.Kind): Unit =
- doTestNoPsiFilesAreBuiltFor(kind.name(), isClassOfKind(kind))
+ protected fun doTestNoPsiFilesAreBuiltForLocalClass(): Unit =
+ doTestNoPsiFilesAreBuiltFor("local", isFileWithHeader { it.isLocalClass })
protected fun doTestNoPsiFilesAreBuiltForSyntheticClasses(): Unit =
doTestNoPsiFilesAreBuiltFor("synthetic", isSyntheticClass())
diff --git a/idea/tests/org/jetbrains/kotlin/idea/decompiler/InternalCompiledClassesTest.kt b/idea/tests/org/jetbrains/kotlin/idea/decompiler/InternalCompiledClassesTest.kt
index 37b930949c5cd..5c372773f053c 100644
--- a/idea/tests/org/jetbrains/kotlin/idea/decompiler/InternalCompiledClassesTest.kt
+++ b/idea/tests/org/jetbrains/kotlin/idea/decompiler/InternalCompiledClassesTest.kt
@@ -20,17 +20,13 @@ import com.intellij.psi.ClassFileViewProvider
import com.intellij.testFramework.LightProjectDescriptor
import org.jetbrains.kotlin.idea.test.JdkAndMockLibraryProjectDescriptor
import org.jetbrains.kotlin.idea.test.PluginTestCaseBase
-import org.jetbrains.kotlin.load.java.JvmAnnotationNames.KotlinClass.Kind.ANONYMOUS_OBJECT
-import org.jetbrains.kotlin.load.java.JvmAnnotationNames.KotlinClass.Kind.LOCAL_CLASS
public class InternalCompiledClassesTest : AbstractInternalCompiledClassesTest() {
private val TEST_DATA_PATH = PluginTestCaseBase.getTestDataPathBase() + "/decompiler/internalClasses"
fun testSyntheticClassesAreInvisible() = doTestNoPsiFilesAreBuiltForSyntheticClasses()
- fun testLocalClassIsInvisible() = doTestNoPsiFilesAreBuiltForLocalClass(LOCAL_CLASS)
-
- fun testAnonymousObjectIsInvisible() = doTestNoPsiFilesAreBuiltForLocalClass(ANONYMOUS_OBJECT)
+ fun testLocalClassesAreInvisible() = doTestNoPsiFilesAreBuiltForLocalClass()
fun testInnerClassIsInvisible() = doTestNoPsiFilesAreBuiltFor("inner or nested class") {
ClassFileViewProvider.isInnerClass(this)
diff --git a/jps-plugin/src/org/jetbrains/kotlin/jps/incremental/IncrementalCacheImpl.kt b/jps-plugin/src/org/jetbrains/kotlin/jps/incremental/IncrementalCacheImpl.kt
index 6a1573694f045..093349ccec181 100644
--- a/jps-plugin/src/org/jetbrains/kotlin/jps/incremental/IncrementalCacheImpl.kt
+++ b/jps-plugin/src/org/jetbrains/kotlin/jps/incremental/IncrementalCacheImpl.kt
@@ -31,7 +31,6 @@ import org.jetbrains.kotlin.jps.build.GeneratedJvmClass
import org.jetbrains.kotlin.jps.build.KotlinBuilder
import org.jetbrains.kotlin.jps.incremental.storage.BasicMap
import org.jetbrains.kotlin.jps.incremental.storage.BasicStringMap
-import org.jetbrains.kotlin.load.java.JvmAnnotationNames
import org.jetbrains.kotlin.load.kotlin.KotlinJvmBinaryClass
import org.jetbrains.kotlin.load.kotlin.ModuleMapping
import org.jetbrains.kotlin.load.kotlin.PackageClassUtils
@@ -216,10 +215,11 @@ public class IncrementalCacheImpl(
constantsMap.process(kotlinClass) +
inlineFunctionsMap.process(kotlinClass)
}
- header.isCompatibleClassKind() && JvmAnnotationNames.KotlinClass.Kind.CLASS == header.classKind ->
+ header.isCompatibleClassKind() && !header.isLocalClass -> {
protoMap.process(kotlinClass, isPackage = false) +
constantsMap.process(kotlinClass) +
inlineFunctionsMap.process(kotlinClass)
+ }
else -> ChangesInfo.NO_CHANGES
}
|
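The compiler change above replaces the KotlinClass.Kind enum with a dedicated kotlin.jvm.internal.KotlinLocalClass marker annotation, so a local class is recognized by the presence of both markers. Below is a minimal, hypothetical reflection check in the spirit of the test code in the diff; the class and method names are illustrative and are not part of the Kotlin runtime or compiler API.

import java.lang.annotation.Annotation;

// Illustrative only: checks whether a compiled class carries both the KotlinClass
// and the new KotlinLocalClass marker annotations, mirroring the assertion logic above.
public class LocalClassMarkerCheck {

    @SuppressWarnings("unchecked")
    static boolean hasMarker(Class<?> clazz, String annotationFqName) throws ClassNotFoundException {
        Class<? extends Annotation> marker =
                (Class<? extends Annotation>) clazz.getClassLoader().loadClass(annotationFqName);
        return clazz.isAnnotationPresent(marker);
    }

    public static void main(String[] args) throws Exception {
        Class<?> compiled = Class.forName(args[0]); // fully-qualified name of a compiled class, passed in
        boolean looksLocal = hasMarker(compiled, "kotlin.jvm.internal.KotlinClass")
                && hasMarker(compiled, "kotlin.jvm.internal.KotlinLocalClass");
        System.out.println(compiled.getName() + " carries the local-class markers: " + looksLocal);
    }
}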
f55b7c350b928aa5419a1e3ee0fdbe4808731038
|
orientdb
|
Renamed threads to more understandable ones--
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemoteServiceThread.java b/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemoteServiceThread.java
index 341c54279be..fd925c86a8b 100644
--- a/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemoteServiceThread.java
+++ b/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemoteServiceThread.java
@@ -18,6 +18,7 @@
import java.io.IOException;
import com.orientechnologies.common.thread.OSoftThread;
+import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryClient;
import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryProtocol;
@@ -33,7 +34,7 @@ public class OStorageRemoteServiceThread extends OSoftThread {
private OChannelBinaryClient network;
public OStorageRemoteServiceThread(final OStorageRemoteThread iStorageRemote, final OChannelBinaryClient iFirstChannel) {
- super("ClientService");
+ super(Orient.getThreadGroup(), "OrientDB AsynchRemoteStorageService");
storage = iStorageRemote;
network = iFirstChannel;
start();
diff --git a/server/src/main/java/com/orientechnologies/orient/server/clustering/ODiscoveryListener.java b/server/src/main/java/com/orientechnologies/orient/server/clustering/ODiscoveryListener.java
index ec7fb7fb654..17a0af8997b 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/clustering/ODiscoveryListener.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/clustering/ODiscoveryListener.java
@@ -37,7 +37,7 @@ public class ODiscoveryListener extends OSoftThread {
private MulticastSocket socket;
public ODiscoveryListener(final ODistributedServerManager iManager, final OServerNetworkListener iNetworkListener) {
- super(Orient.getThreadGroup(), "IO-Cluster-DiscoveryListener");
+ super(Orient.getThreadGroup(), "OrientDB Distributed-DiscoveryListener");
manager = iManager;
binaryNetworkListener = iNetworkListener;
diff --git a/server/src/main/java/com/orientechnologies/orient/server/clustering/ODiscoverySignaler.java b/server/src/main/java/com/orientechnologies/orient/server/clustering/ODiscoverySignaler.java
index b7b06544bff..59006e91cdb 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/clustering/ODiscoverySignaler.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/clustering/ODiscoverySignaler.java
@@ -43,7 +43,7 @@ public class ODiscoverySignaler extends OPollerThread {
private TimerTask runningTask;
public ODiscoverySignaler(final ODistributedServerManager iManager, final OServerNetworkListener iNetworkListener) {
- super(iManager.getConfig().networkMulticastHeartbeat * 1000, Orient.getThreadGroup(), "IO-Cluster-DiscoverySignaler");
+ super(iManager.getConfig().networkMulticastHeartbeat * 1000, Orient.getThreadGroup(), "OrientDB Distributed-DiscoverySignaler");
manager = iManager;
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java
index a533d63f4eb..f13bc6ff31f 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java
@@ -45,7 +45,7 @@ public class ONetworkProtocolDistributed extends ONetworkProtocolBinary implemen
private ODistributedServerManager manager;
public ONetworkProtocolDistributed() {
- super("Distributed-DB");
+ super("OrientDB DistributedBinaryNetworkProtocolListener");
manager = OServerMain.server().getHandler(ODistributedServerManager.class);
if (manager == null)
@@ -89,8 +89,8 @@ protected void parseCommand() throws IOException, InterruptedException {
channel.writeByte((byte) 0);
channel.flush();
- OLogManager.instance().warn(this, "Current node remains the Leader of the cluster because it has lower network address",
- leaderAddress);
+ OLogManager.instance().warn(this,
+ "Current node remains the Leader of the cluster because it has lower network address", leaderAddress);
return;
}
}
|
a47b1d692d7ccf7d7c087607c394e55690f60655
|
camel
|
Polished code--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@812510 13f79535-47bb-0310-9956-ffa450edef68-
|
p
|
https://github.com/apache/camel
|
diff --git a/camel-core/src/main/java/org/apache/camel/impl/DefaultExchangeHolder.java b/camel-core/src/main/java/org/apache/camel/impl/DefaultExchangeHolder.java
index 855ef105a799e..67a87e4744555 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/DefaultExchangeHolder.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/DefaultExchangeHolder.java
@@ -117,7 +117,7 @@ private static Object checkSerializableObject(String type, Exchange exchange, Ob
if (converted != null) {
return converted;
} else {
- LOG.warn(type + " containig object: " + object + " of type: " + object.getClass().getCanonicalName() + " cannot be serialized, it will be excluded by the holder");
+ LOG.warn(type + " containing object: " + object + " of type: " + object.getClass().getCanonicalName() + " cannot be serialized, it will be excluded by the holder.");
return null;
}
}
@@ -134,7 +134,7 @@ private static Map<String, Object> checkMapSerializableObjects(String type, Exch
result.put(entry.getKey(), converted);
} else {
LOG.warn(type + " containing object: " + entry.getValue() + " with key: " + entry.getKey()
- + " cannot be serialized, it will be excluded by the holder");
+ + " cannot be serialized, it will be excluded by the holder.");
}
}
diff --git a/camel-core/src/main/java/org/apache/camel/impl/DefaultUnitOfWork.java b/camel-core/src/main/java/org/apache/camel/impl/DefaultUnitOfWork.java
index 02d21e9234d9a..b15063a3361d5 100644
--- a/camel-core/src/main/java/org/apache/camel/impl/DefaultUnitOfWork.java
+++ b/camel-core/src/main/java/org/apache/camel/impl/DefaultUnitOfWork.java
@@ -129,7 +129,7 @@ public void done(Exchange exchange) {
}
} catch (Exception e) {
// must catch exceptions to ensure all synchronizations have a chance to run
- LOG.error("Exception occurred during onCompletion. This exception will be ignored: ", e);
+ LOG.warn("Exception occurred during onCompletion. This exception will be ignored: ", e);
}
}
}
diff --git a/camel-core/src/main/java/org/apache/camel/processor/MulticastProcessor.java b/camel-core/src/main/java/org/apache/camel/processor/MulticastProcessor.java
index 6c90c754ded9f..53acee28ce1b6 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/MulticastProcessor.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/MulticastProcessor.java
@@ -127,7 +127,7 @@ public void process(Exchange exchange) throws Exception {
if (isParallelProcessing()) {
doProcessParallel(result, pairs, isStreaming());
} else {
- doProcessSequntial(result, pairs);
+ doProcessSequential(result, pairs);
}
if (result.get() != null) {
@@ -197,7 +197,7 @@ public Exchange call() throws Exception {
}
}
- protected void doProcessSequntial(AtomicExchange result, Iterable<ProcessorExchangePair> pairs) throws Exception {
+ protected void doProcessSequential(AtomicExchange result, Iterable<ProcessorExchangePair> pairs) throws Exception {
int total = 0;
for (ProcessorExchangePair pair : pairs) {
@@ -214,11 +214,11 @@ protected void doProcessSequntial(AtomicExchange result, Iterable<ProcessorExcha
// should we stop in case of an exception occured during processing?
if (stopOnException && subExchange.getException() != null) {
- throw new CamelExchangeException("Sequiental processing failed for number " + total, subExchange, subExchange.getException());
+ throw new CamelExchangeException("Sequential processing failed for number " + total, subExchange, subExchange.getException());
}
if (LOG.isTraceEnabled()) {
- LOG.trace("Sequiental processing complete for number " + total + " exchange: " + subExchange);
+ LOG.trace("Sequential processing complete for number " + total + " exchange: " + subExchange);
}
if (aggregationStrategy != null) {
@@ -228,7 +228,7 @@ protected void doProcessSequntial(AtomicExchange result, Iterable<ProcessorExcha
}
if (LOG.isDebugEnabled()) {
- LOG.debug("Done sequiental processing " + total + " exchanges");
+ LOG.debug("Done sequential processing " + total + " exchanges");
}
}
diff --git a/camel-core/src/main/java/org/apache/camel/processor/OnCompletionProcessor.java b/camel-core/src/main/java/org/apache/camel/processor/OnCompletionProcessor.java
index f3e8d3ae077fd..45f35581e4e37 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/OnCompletionProcessor.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/OnCompletionProcessor.java
@@ -109,7 +109,7 @@ public void onFailure(Exchange exchange) {
// must use a copy as we dont want it to cause side effects of the original exchange
final Exchange copy = prepareExchange(exchange);
- // must remove exception otherwise onFaulure routing will fail as well
+ // must remove exception otherwise onFailure routing will fail as well
// the caused exception is stored as a property (Exchange.EXCEPTION_CAUGHT) on the exchange
copy.setException(null);
diff --git a/camel-core/src/main/java/org/apache/camel/processor/StreamResequencer.java b/camel-core/src/main/java/org/apache/camel/processor/StreamResequencer.java
index 46db22cd1dcb9..e01a2e6b6fc26 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/StreamResequencer.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/StreamResequencer.java
@@ -33,6 +33,7 @@
import org.apache.camel.processor.resequencer.SequenceSender;
import org.apache.camel.spi.ExceptionHandler;
import org.apache.camel.util.ServiceHelper;
+import org.apache.camel.util.concurrent.ExecutorServiceHelper;
/**
* A resequencer that re-orders a (continuous) stream of {@link Exchange}s. The
@@ -187,7 +188,7 @@ private class Delivery extends Thread {
private Condition deliveryRequestCondition = deliveryRequestLock.newCondition();
public Delivery() {
- super("Resequencer Delivery Thread");
+ super(ExecutorServiceHelper.getThreadName("Resequencer Delivery"));
}
@Override
diff --git a/camel-core/src/main/java/org/apache/camel/processor/loadbalancer/FailOverLoadBalancer.java b/camel-core/src/main/java/org/apache/camel/processor/loadbalancer/FailOverLoadBalancer.java
index 25bb8eb164d9e..40ec9fa4c2562 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/loadbalancer/FailOverLoadBalancer.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/loadbalancer/FailOverLoadBalancer.java
@@ -37,7 +37,7 @@ public FailOverLoadBalancer(List<Class> exceptions) {
this.exceptions = exceptions;
for (Class type : exceptions) {
if (!ObjectHelper.isAssignableFrom(Throwable.class, type)) {
- throw new IllegalArgumentException("Class is not an instance of Trowable: " + type);
+ throw new IllegalArgumentException("Class is not an instance of Throwable: " + type);
}
}
}
diff --git a/camel-core/src/main/java/org/apache/camel/processor/resequencer/ResequencerEngine.java b/camel-core/src/main/java/org/apache/camel/processor/resequencer/ResequencerEngine.java
index 514db1f5848ba..b20b7f635ed59 100644
--- a/camel-core/src/main/java/org/apache/camel/processor/resequencer/ResequencerEngine.java
+++ b/camel-core/src/main/java/org/apache/camel/processor/resequencer/ResequencerEngine.java
@@ -18,6 +18,8 @@
import java.util.Timer;
+import org.apache.camel.util.concurrent.ExecutorServiceHelper;
+
/**
* Resequences elements based on a given {@link SequenceElementComparator}.
* This resequencer is designed for resequencing element streams. Stream-based
@@ -97,7 +99,7 @@ public ResequencerEngine(SequenceElementComparator<E> comparator) {
}
public void start() {
- timer = new Timer("Camel Stream Resequencer Timer", true);
+ timer = new Timer(ExecutorServiceHelper.getThreadName("Stream Resequencer Timer"), true);
}
/**
diff --git a/camel-core/src/test/java/org/apache/camel/issues/CharlesSplitAndTryCatchRollbackIssueTest.java b/camel-core/src/test/java/org/apache/camel/issues/CharlesSplitAndTryCatchRollbackIssueTest.java
index b58a86631bdc9..8be2b4f40373e 100644
--- a/camel-core/src/test/java/org/apache/camel/issues/CharlesSplitAndTryCatchRollbackIssueTest.java
+++ b/camel-core/src/test/java/org/apache/camel/issues/CharlesSplitAndTryCatchRollbackIssueTest.java
@@ -72,7 +72,7 @@ public void testSplitWithTryCatchAndRollbackException() throws Exception {
fail("Should thrown an exception");
} catch (CamelExecutionException e) {
CamelExchangeException ee = assertIsInstanceOf(CamelExchangeException.class, e.getCause());
- assertEquals("Sequiental processing failed for number 2 on the exchange: Exchange[Message: Kaboom]", ee.getMessage());
+ assertEquals("Sequential processing failed for number 2 on the exchange: Exchange[Message: Kaboom]", ee.getMessage());
RollbackExchangeException re = assertIsInstanceOf(RollbackExchangeException.class, ee.getCause());
assertEquals("Intended rollback on the exchange: Exchange[Message: Kaboom]", re.getMessage());
}
@@ -94,7 +94,7 @@ public void testSplitWithTryCatchAndRollbacILEAndException() throws Exception {
fail("Should thrown an exception");
} catch (CamelExecutionException e) {
CamelExchangeException ee = assertIsInstanceOf(CamelExchangeException.class, e.getCause());
- assertEquals("Sequiental processing failed for number 3 on the exchange: Exchange[Message: Kaboom]", ee.getMessage());
+ assertEquals("Sequential processing failed for number 3 on the exchange: Exchange[Message: Kaboom]", ee.getMessage());
RollbackExchangeException re = assertIsInstanceOf(RollbackExchangeException.class, ee.getCause());
assertEquals("Intended rollback on the exchange: Exchange[Message: Kaboom]", re.getMessage());
}
diff --git a/camel-core/src/test/java/org/apache/camel/processor/MulticastStopOnExceptionTest.java b/camel-core/src/test/java/org/apache/camel/processor/MulticastStopOnExceptionTest.java
index 5e4e95a1504cb..e131898abbdb7 100644
--- a/camel-core/src/test/java/org/apache/camel/processor/MulticastStopOnExceptionTest.java
+++ b/camel-core/src/test/java/org/apache/camel/processor/MulticastStopOnExceptionTest.java
@@ -51,7 +51,7 @@ public void testMulticastStopOnExceptionStop() throws Exception {
fail("Should thrown an exception");
} catch (CamelExecutionException e) {
CamelExchangeException cause = assertIsInstanceOf(CamelExchangeException.class, e.getCause());
- assertEquals("Sequiental processing failed for number 1 on the exchange: Exchange[Message: Kaboom]", cause.getMessage());
+ assertEquals("Sequential processing failed for number 1 on the exchange: Exchange[Message: Kaboom]", cause.getMessage());
assertEquals("Forced", cause.getCause().getMessage());
}
diff --git a/camel-core/src/test/java/org/apache/camel/processor/SplitterStopOnExceptionTest.java b/camel-core/src/test/java/org/apache/camel/processor/SplitterStopOnExceptionTest.java
index 1643e0d07fa8d..d78b6dc36eb5f 100644
--- a/camel-core/src/test/java/org/apache/camel/processor/SplitterStopOnExceptionTest.java
+++ b/camel-core/src/test/java/org/apache/camel/processor/SplitterStopOnExceptionTest.java
@@ -48,7 +48,7 @@ public void testSplitStopOnExceptionStop() throws Exception {
fail("Should thrown an exception");
} catch (CamelExecutionException e) {
CamelExchangeException cause = assertIsInstanceOf(CamelExchangeException.class, e.getCause());
- assertEquals("Sequiental processing failed for number 1 on the exchange: Exchange[Message: Kaboom]", cause.getMessage());
+ assertEquals("Sequential processing failed for number 1 on the exchange: Exchange[Message: Kaboom]", cause.getMessage());
assertEquals("Forced", cause.getCause().getMessage());
}
|
8738a12d94509246cbb435474e03fc4421dd2537
|
drools
|
JBRULES-447 - small fix for rule attributes--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@6003 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-decisiontables/src/main/java/org/drools/decisiontable/model/Rule.java b/drools-decisiontables/src/main/java/org/drools/decisiontable/model/Rule.java
index d5a40702968..6b3ba6b5659 100644
--- a/drools-decisiontables/src/main/java/org/drools/decisiontable/model/Rule.java
+++ b/drools-decisiontables/src/main/java/org/drools/decisiontable/model/Rule.java
@@ -100,13 +100,13 @@ public void renderDRL(final DRLOutput out) {
out.writeLine( "\tsalience " + this._salience );
}
if ( this._activationGroup != null ) {
- out.writeLine( "\tactivation-group" + this._activationGroup );
+ out.writeLine( "\tactivation-group " + this._activationGroup );
}
if ( this._noLoop != null ) {
- out.writeLine( "\tno-loop" + this._noLoop );
+ out.writeLine( "\tno-loop " + this._noLoop );
}
if ( this._duration != null ) {
- out.writeLine( "\tduration" + this._duration );
+ out.writeLine( "\tduration " + this._duration );
}
out.writeLine( "\twhen" );
@@ -239,14 +239,6 @@ public void setNoLoop(final String value) // Set the no-loop attribute of the ru
this._noLoop = value;
}
- public boolean getNoLoop() {
- String value = "false";
- if ( this._noLoop.compareTo( "true" ) != 0 ) {
- value = this._noLoop;
- }
- final Boolean b = new Boolean( value );
- return b.booleanValue();
- }
/**
* @return The row in the spreadsheet this represents.
|
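A small, hypothetical Java snippet illustrating the effect of the one-character fix in the decision-table commit above: without the trailing space, the rendered DRL attribute name and its value run together and no longer parse. The attribute value used here is made up for illustration.

public class AttributeRenderingDemo {
    public static void main(String[] args) {
        String activationGroup = "pricing";
        // before the fix: "\tactivation-grouppricing" (attribute and value fused, invalid DRL)
        System.out.println("\tactivation-group" + activationGroup);
        // after the fix: "\tactivation-group pricing"
        System.out.println("\tactivation-group " + activationGroup);
    }
}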
f3362eb788219ecc55d4b648e2fb589e8d6171d1
|
ReactiveX-RxJava
|
add error handling for onNext failure so exceptions- don't get thrown up the stack but instead via onError--
|
c
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/util/AtomicObserver.java b/rxjava-core/src/main/java/rx/util/AtomicObserver.java
index 86eee9ccfa..fe04125bf9 100644
--- a/rxjava-core/src/main/java/rx/util/AtomicObserver.java
+++ b/rxjava-core/src/main/java/rx/util/AtomicObserver.java
@@ -67,8 +67,13 @@ public void onError(Exception e) {
@Override
public void onNext(T args) {
- if (!isFinished.get()) {
- actual.onNext(args);
+ try {
+ if (!isFinished.get()) {
+ actual.onNext(args);
+ }
+ }catch(Exception e) {
+ // handle errors if the onNext implementation fails, not just if the Observable fails
+ onError(e);
}
}
|
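A self-contained sketch of the guard pattern the RxJava diff above introduces: delivery of a value is wrapped in try/catch so that an exception thrown by the onNext implementation is routed to onError instead of propagating up the caller's stack. The MiniObserver interface below is a stand-in for illustration, not the RxJava Observer type.

public class SafeDeliverySketch {

    interface MiniObserver<T> {
        void onNext(T value);
        void onError(Exception e);
    }

    // Wraps onNext so that a failure in the callback surfaces through onError
    // rather than escaping to the code that pushed the value.
    static <T> void deliver(MiniObserver<T> observer, T value) {
        try {
            observer.onNext(value);
        } catch (Exception e) {
            observer.onError(e);
        }
    }

    public static void main(String[] args) {
        deliver(new MiniObserver<String>() {
            public void onNext(String value) { throw new RuntimeException("boom on " + value); }
            public void onError(Exception e) { System.out.println("handled: " + e.getMessage()); }
        }, "hello");
    }
}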
48053b56631374d50fc9075e39fea70da419a5c8
|
orientdb
|
implemented integration of global property with- binary serialization--
|
a
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/binary/ORecordSerializerBinaryV0.java b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/binary/ORecordSerializerBinaryV0.java
index ba347d5bf49..90d03f9984a 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/binary/ORecordSerializerBinaryV0.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/serialization/serializer/record/binary/ORecordSerializerBinaryV0.java
@@ -22,10 +22,12 @@
import com.orientechnologies.orient.core.db.record.OTrackedMap;
import com.orientechnologies.orient.core.db.record.OTrackedSet;
import com.orientechnologies.orient.core.db.record.ridbag.ORidBag;
+import com.orientechnologies.orient.core.exception.ODatabaseException;
import com.orientechnologies.orient.core.id.OClusterPositionLong;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.metadata.schema.OClass;
+import com.orientechnologies.orient.core.metadata.schema.OGlobalProperty;
import com.orientechnologies.orient.core.metadata.schema.OProperty;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.ORecordInternal;
@@ -52,15 +54,38 @@ public void deserialize(final ODocument document, final BytesContainer bytes) {
document.setClassNameIfExists(className);
int last = 0;
String field;
- while ((field = readString(bytes)).length() != 0) {
+ while (true) {
+ OGlobalProperty prop = null;
+ final int len = OVarIntSerializer.readAsInteger(bytes);
+ if (len == 0)
+ break;
+ else if (len > 0) {
+ final String res = new String(bytes.bytes, bytes.offset, len, utf8);
+ bytes.skip(len);
+ field = res;
+ } else {
+ ODatabaseRecord db = document.getDatabase();
+ if (db == null || db.isClosed())
+ throw new ODatabaseException("Impossible deserialize the document no database present");
+ prop = db.getMetadata().getSchema().getGlobalPropertyById((len * -1) - 1);
+ field = prop.getName();
+ }
+
if (document.containsField(field)) {
// SKIP FIELD
- bytes.skip(OIntegerSerializer.INT_SIZE + 1);
+ if (prop != null && prop.getType() != OType.ANY)
+ bytes.skip(OIntegerSerializer.INT_SIZE);
+ else
+ bytes.skip(OIntegerSerializer.INT_SIZE + 1);
continue;
}
final int valuePos = readInteger(bytes);
- final OType type = readOType(bytes);
+ final OType type;
+ if (prop != null && prop.getType() != OType.ANY)
+ type = prop.getType();
+ else
+ type = readOType(bytes);
if (valuePos != 0) {
int headerCursor = bytes.offset;
@@ -89,11 +114,21 @@ public void serialize(ODocument document, BytesContainer bytes) {
else
writeEmptyString(bytes);
int[] pos = new int[document.fields()];
+ OProperty[] properties = new OProperty[document.fields()];
int i = 0;
Entry<String, ?> values[] = new Entry[document.fields()];
for (Entry<String, Object> entry : document) {
- writeString(bytes, entry.getKey());
- pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE + 1);
+ properties[i] = getSchemaProperty(document, entry.getKey());
+ if (properties[i] != null) {
+ OVarIntSerializer.write(bytes, (properties[i].getId() + 1) * -1);
+ if (properties[i].getType() != OType.ANY)
+ pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE);
+ else
+ pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE + 1);
+ } else {
+ writeString(bytes, entry.getKey());
+ pos[i] = bytes.alloc(OIntegerSerializer.INT_SIZE + 1);
+ }
values[i] = entry;
i++;
}
@@ -109,7 +144,8 @@ public void serialize(ODocument document, BytesContainer bytes) {
continue;
pointer = writeSingleValue(bytes, value, type, getLinkedType(document, type, values[i].getKey()));
OIntegerSerializer.INSTANCE.serialize(pointer, bytes.bytes, pos[i]);
- writeOType(bytes, (pos[i] + OIntegerSerializer.INT_SIZE), type);
+ if (properties[i] == null || properties[i].getType() == OType.ANY)
+ writeOType(bytes, (pos[i] + OIntegerSerializer.INT_SIZE), type);
}
}
@@ -539,6 +575,13 @@ private int writeEmbeddedCollection(BytesContainer bytes, Collection<?> value, O
return pos;
}
+ private OProperty getSchemaProperty(ODocument document, String key) {
+ OClass clazz = document.getSchemaClass();
+ if (clazz != null)
+ return clazz.getProperty(key);
+ return null;
+ }
+
private OType getFieldType(ODocument document, String key, Object fieldValue) {
OType type = document.fieldType(key);
if (type == null) {
|
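A hypothetical, self-contained illustration of the header convention the OrientDB serializer above adopts: a positive varint is the byte length of an inline field name, a negative varint encodes a schema-registered global property as -(propertyId + 1), and 0 terminates the header. Only the id arithmetic is shown here; the actual varint and string encoding is left to the serializer.

public class FieldKeyEncodingSketch {

    // A global property id is stored as a negative value so it cannot collide
    // with a positive field-name length.
    static int encodePropertyId(int globalPropertyId) {
        return (globalPropertyId + 1) * -1;
    }

    static int decodePropertyId(int encodedValue) {
        return (encodedValue * -1) - 1;
    }

    public static void main(String[] args) {
        int encoded = encodePropertyId(7);
        System.out.println("property 7 is written as " + encoded);                  // -8
        System.out.println("and decoded back to id " + decodePropertyId(encoded));  // 7
    }
}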
42d43310ecb03895bc327bdc788375fde57b5f2d
|
drools
|
JBRULES-527: adding primitive support to indexing--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@7158 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/ArrayFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/ArrayFactory.java
index 66fa281998a..67ec1c422e6 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/ArrayFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/ArrayFactory.java
@@ -101,9 +101,21 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.equals( value2 );
+ }
+
public String toString() {
return "Array ==";
}
+
}
static class ArrayNotEqualEvaluator extends BaseEvaluator {
@@ -124,7 +136,7 @@ public boolean evaluate(final Extractor extractor,
final Object value1 = extractor.getValue( object1 );
final Object value2 = object2.getValue();
if ( value1 == null ) {
- return value2 == null;
+ return value2 != null;
}
return !value1.equals( value2 );
}
@@ -133,7 +145,7 @@ public boolean evaluateCachedRight(final VariableContextEntry context,
final Object left) {
final Object value = context.declaration.getExtractor().getValue( left );
if ( value == null ) {
- return ((ObjectVariableContextEntry) context).right == null;
+ return ((ObjectVariableContextEntry) context).right != null;
}
return !value.equals( ((ObjectVariableContextEntry) context).right );
}
@@ -142,11 +154,22 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
final Object right) {
final Object value = context.extractor.getValue( right );
if ( ((ObjectVariableContextEntry) context).left == null ) {
- return value == null;
+ return value != null;
}
return !((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 != null;
+ }
+ return !value1.equals( value2 );
+ }
+
public String toString() {
return "Array !=";
}
@@ -169,34 +192,34 @@ public boolean evaluate(final Extractor extractor,
final FieldValue object2) {
final Object value = object2.getValue();
final Object[] array = (Object[]) extractor.getValue( object1 );
-
- if ( Arrays.binarySearch( array,
- value ) == -1 ) {
- return false;
- }
- return true;
+ return Arrays.binarySearch( array,
+ value ) != -1;
}
public boolean evaluateCachedRight(final VariableContextEntry context,
final Object left) {
final Object value = context.declaration.getExtractor().getValue( left );
final Object[] array = (Object[]) ((ObjectVariableContextEntry) context).right;
- if ( Arrays.binarySearch( array,
- value ) == -1 ) {
- return false;
- }
- return true;
+ return Arrays.binarySearch( array,
+ value ) != -1;
}
public boolean evaluateCachedLeft(final VariableContextEntry context,
final Object right) {
final Object value = ((ObjectVariableContextEntry) context).left;
final Object[] array = (Object[]) context.extractor.getValue( right );
- if ( Arrays.binarySearch( array,
- value ) == -1 ) {
- return false;
- }
- return true;
+ return Arrays.binarySearch( array,
+ value ) != -1;
+ }
+
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value = extractor.getValue( object2 );
+ final Object[] array = (Object[]) extractor.getValue( object1 );
+
+ return Arrays.binarySearch( array,
+ value ) != -1 ;
}
public String toString() {
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/BigDecimalFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/BigDecimalFactory.java
index 10195cfbac7..91114ce6ff8 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/BigDecimalFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/BigDecimalFactory.java
@@ -103,9 +103,21 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.equals( value2 );
+ }
+
public String toString() {
return "BigDecimal ==";
}
+
}
static class BigDecimalNotEqualEvaluator extends BaseEvaluator {
@@ -149,6 +161,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return !((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 != null;
+ }
+ return !value1.equals( value2 );
+ }
+
public String toString() {
return "BigDecimal !=";
}
@@ -185,6 +208,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) < 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final BigDecimal comp = (BigDecimal) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) < 0;
+ }
+
public String toString() {
return "BigDecimal <";
}
@@ -221,6 +251,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) <= 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final BigDecimal comp = (BigDecimal) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) <= 0;
+ }
+
public String toString() {
return "BigDecimal <=";
}
@@ -257,6 +294,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) > 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final BigDecimal comp = (BigDecimal) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) > 0;
+ }
+
public String toString() {
return "BigDecimal >";
}
@@ -293,6 +337,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) >= 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final BigDecimal comp = (BigDecimal) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) >= 0;
+ }
+
public String toString() {
return "BigDecimal >=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/BigIntegerFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/BigIntegerFactory.java
index d689acb07cf..c809c1ff841 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/BigIntegerFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/BigIntegerFactory.java
@@ -103,6 +103,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.equals( value2 );
+ }
+
public String toString() {
return "BigInteger ==";
}
@@ -149,6 +160,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return !((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.equals( value2 );
+ }
+
public String toString() {
return "BigInteger !=";
}
@@ -185,6 +207,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) < 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final BigInteger comp = (BigInteger) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) < 0;
+ }
+
public String toString() {
return "BigInteger <";
}
@@ -221,6 +250,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) <= 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final BigInteger comp = (BigInteger) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) <= 0;
+ }
+
public String toString() {
return "BigInteger <=";
}
@@ -257,6 +293,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) > 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final BigInteger comp = (BigInteger) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) > 0;
+ }
+
public String toString() {
return "BigInteger >";
}
@@ -293,6 +336,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) >= 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final BigInteger comp = (BigInteger) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) >= 0;
+ }
+
public String toString() {
return "BigInteger >=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/BooleanFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/BooleanFactory.java
index 53db9a6ba47..04aa9ee4bd4 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/BooleanFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/BooleanFactory.java
@@ -80,9 +80,16 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getBooleanValue( object2 ) == ((BooleanVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getBooleanValue( object1 ) == extractor.getBooleanValue( object2 );
+ }
+
public String toString() {
return "Boolean ==";
}
+
}
static class BooleanNotEqualEvaluator extends BaseEvaluator {
@@ -113,6 +120,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getBooleanValue( object2 ) != ((BooleanVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getBooleanValue( object1 ) != extractor.getBooleanValue( object2 );
+ }
+
public String toString() {
return "Boolean !=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/ByteFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/ByteFactory.java
index 208db1e1cca..9ca7b31128a 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/ByteFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/ByteFactory.java
@@ -88,9 +88,16 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((LongVariableContextEntry) context).left == context.extractor.getByteValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getByteValue( object1 ) == extractor.getByteValue( object2 );
+ }
+
public String toString() {
return "Byte ==";
}
+
}
static class ByteNotEqualEvaluator extends BaseEvaluator {
@@ -121,6 +128,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((LongVariableContextEntry) context).left != context.extractor.getByteValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getByteValue( object1 ) != extractor.getByteValue( object2 );
+ }
+
public String toString() {
return "Byte !=";
}
@@ -154,6 +167,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getByteValue( right ) < ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getByteValue( object1 ) < extractor.getByteValue( object2 );
+ }
+
public String toString() {
return "Byte <";
}
@@ -187,6 +206,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getByteValue( right ) <= ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getByteValue( object1 ) <= extractor.getByteValue( object2 );
+ }
+
public String toString() {
return "Byte <=";
}
@@ -220,6 +245,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getByteValue( right ) > ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getByteValue( object1 ) > extractor.getByteValue( object2 );
+ }
+
public String toString() {
return "Byte >";
}
@@ -253,6 +284,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getByteValue( right ) >= ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getByteValue( object1 ) >= extractor.getByteValue( object2 );
+ }
+
public String toString() {
return "Byte >=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/CharacterFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/CharacterFactory.java
index 37876d93458..67366e30807 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/CharacterFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/CharacterFactory.java
@@ -88,6 +88,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((LongVariableContextEntry) context).left == context.extractor.getCharValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getCharValue( object1 ) == extractor.getCharValue( object2 );
+ }
+
public String toString() {
return "Character ==";
}
@@ -121,6 +127,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((LongVariableContextEntry) context).left != context.extractor.getCharValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getCharValue( object1 ) != extractor.getCharValue( object2 );
+ }
+
public String toString() {
return "Character !=";
}
@@ -154,6 +166,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getCharValue( right ) <((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getCharValue( object1 ) < extractor.getCharValue( object2 );
+ }
+
public String toString() {
return "Character <";
}
@@ -187,6 +205,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getCharValue( right ) <= ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getCharValue( object1 ) <= extractor.getCharValue( object2 );
+ }
+
public String toString() {
return "Character <=";
}
@@ -220,6 +244,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getCharValue( right ) > ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getCharValue( object1 ) > extractor.getCharValue( object2 );
+ }
+
public String toString() {
return "Character >";
}
@@ -253,6 +283,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getCharValue( right ) >= ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getCharValue( object1 ) >= extractor.getCharValue( object2 );
+ }
+
public String toString() {
return "Character >=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/DateFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/DateFactory.java
index bb22ef9eb4a..5391489f24a 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/DateFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/DateFactory.java
@@ -131,9 +131,21 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return value1.compareTo( getRightDate( value2 ) ) == 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Date value1 = (Date) extractor.getValue( object1 );
+ final Date value2 = (Date) extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.compareTo( value2 ) == 0;
+ }
+
public String toString() {
return "Date ==";
}
+
}
static class DateNotEqualEvaluator extends BaseEvaluator {
@@ -188,6 +200,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return value1.compareTo( getRightDate( value2 ) ) != 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Date value1 = (Date) extractor.getValue( object1 );
+ final Date value2 = (Date) extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.compareTo( value2 ) != 0;
+ }
+
public String toString() {
return "Date !=";
}
@@ -227,6 +250,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return getRightDate( value2 ).compareTo( value1 ) < 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Date value1 = (Date) extractor.getValue( object1 );
+ final Date value2 = (Date) extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.compareTo( value2 ) < 0;
+ }
+
public String toString() {
return "Date <";
}
@@ -266,6 +300,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return getRightDate( value2 ).compareTo( value1 ) <= 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Date value1 = (Date) extractor.getValue( object1 );
+ final Date value2 = (Date) extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.compareTo( value2 ) <= 0;
+ }
+
public String toString() {
return "Date <=";
}
@@ -305,6 +350,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return getRightDate( value2 ).compareTo( value1 ) > 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Date value1 = (Date) extractor.getValue( object1 );
+ final Date value2 = (Date) extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.compareTo( value2 ) > 0;
+ }
+
public String toString() {
return "Date >";
}
@@ -344,6 +400,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return getRightDate( value2 ).compareTo( value1 ) >= 0;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Date value1 = (Date) extractor.getValue( object1 );
+ final Date value2 = (Date) extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.compareTo( value2 ) >= 0;
+ }
+
public String toString() {
return "Date >=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/DoubleFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/DoubleFactory.java
index 088340dde7d..45d61d91c2c 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/DoubleFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/DoubleFactory.java
@@ -91,6 +91,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((DoubleVariableContextEntry) context).left == context.extractor.getDoubleValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getDoubleValue( object1 ) == extractor.getDoubleValue( object2 );
+ }
+
public String toString() {
return "Double ==";
}
@@ -127,6 +134,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((DoubleVariableContextEntry) context).left != context.extractor.getDoubleValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getDoubleValue( object1 ) != extractor.getDoubleValue( object2 );
+ }
+
public String toString() {
return "Double !=";
}
@@ -163,6 +177,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getDoubleValue( right ) < ((DoubleVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getDoubleValue( object1 ) < extractor.getDoubleValue( object2 );
+ }
+
public String toString() {
return "Double <";
}
@@ -199,6 +220,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getDoubleValue( right ) <= ((DoubleVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getDoubleValue( object1 ) <= extractor.getDoubleValue( object2 );
+ }
+
public String toString() {
return "Double <=";
}
@@ -235,6 +263,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getDoubleValue( right ) > ((DoubleVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getDoubleValue( object1 ) > extractor.getDoubleValue( object2 );
+ }
+
public String toString() {
return "Double >";
}
@@ -271,6 +306,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getDoubleValue( right ) >= ((DoubleVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getDoubleValue( object1 ) >= extractor.getDoubleValue( object2 );
+ }
+
public String toString() {
return "Double >=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/FactTemplateFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/FactTemplateFactory.java
index 7433a9070b4..f1be3da3e8c 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/FactTemplateFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/FactTemplateFactory.java
@@ -82,17 +82,6 @@ public boolean evaluate(final Extractor extractor,
return value1.equals( value2 );
}
- public boolean evaluate(final FieldValue object1,
- final Extractor extractor,
- final Object object2) {
- final Object value1 = object1.getValue();
- final Object value2 = extractor.getValue( object2 );
- if ( value1 == null ) {
- return value2 == null;
- }
- return value1.equals( value2 );
- }
-
public boolean evaluateCachedRight(final VariableContextEntry context,
final Object left) {
final Object value = context.declaration.getExtractor().getValue( left );
@@ -111,9 +100,21 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.equals( value2 );
+ }
+
public String toString() {
return "FactTemplate ==";
}
+
}
static class FactTemplateNotEqualEvaluator extends BaseEvaluator {
@@ -139,17 +140,6 @@ public boolean evaluate(final Extractor extractor,
return !value1.equals( value2 );
}
- public boolean evaluate(final FieldValue object1,
- final Extractor extractor,
- final Object object2) {
- final Object value1 = object1.getValue();
- final Object value2 = extractor.getValue( object2 );
- if ( value1 == null ) {
- return value2 != null;
- }
- return !value1.equals( value2 );
- }
-
public boolean evaluateCachedRight(final VariableContextEntry context,
final Object left) {
final Object value = context.declaration.getExtractor().getValue( left );
@@ -168,6 +158,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return !((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 != null;
+ }
+ return !value1.equals( value2 );
+ }
+
public String toString() {
return "FactTemplate !=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/FloatFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/FloatFactory.java
index 0d698acc849..5dfd9f950d1 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/FloatFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/FloatFactory.java
@@ -89,6 +89,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((DoubleVariableContextEntry) context).left == context.extractor.getFloatValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getFloatValue( object1 ) == extractor.getFloatValue( object2 );
+ }
+
public String toString() {
return "Float ==";
}
@@ -125,6 +132,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((DoubleVariableContextEntry) context).left != context.extractor.getFloatValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getFloatValue( object1 ) != extractor.getFloatValue( object2 );
+ }
+
public String toString() {
return "Float !=";
}
@@ -161,6 +175,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getFloatValue( right ) < ((DoubleVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getFloatValue( object1 ) < extractor.getFloatValue( object2 );
+ }
+
public String toString() {
return "Float <";
}
@@ -197,6 +218,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getFloatValue( right ) <= ((DoubleVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getFloatValue( object1 ) <= extractor.getFloatValue( object2 );
+ }
+
public String toString() {
return "Float <=";
}
@@ -233,6 +261,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getFloatValue( right ) > ((DoubleVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getFloatValue( object1 ) > extractor.getFloatValue( object2 );
+ }
+
public String toString() {
return "Float >";
}
@@ -269,6 +304,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getFloatValue( right ) >= ((DoubleVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ // TODO: we are not handling delta right now... maybe we should
+ return extractor.getFloatValue( object1 ) >= extractor.getFloatValue( object2 );
+ }
+
public String toString() {
return "Float >=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/IntegerFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/IntegerFactory.java
index 3fbd6c98cbb..c1ba19fce1a 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/IntegerFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/IntegerFactory.java
@@ -88,6 +88,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getIntValue( object2 ) == ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getIntValue( object1 ) == extractor.getIntValue( object2 );
+ }
+
public String toString() {
return "Integer ==";
}
@@ -122,6 +128,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getIntValue( object2 ) != ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getIntValue( object1 ) != extractor.getIntValue( object2 );
+ }
+
public String toString() {
return "Integer !=";
}
@@ -155,6 +167,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getIntValue( right ) < ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getIntValue( object1 ) < extractor.getIntValue( object2 );
+ }
+
public String toString() {
return "Integer <";
}
@@ -188,6 +206,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getIntValue( right ) <= ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getIntValue( object1 ) <= extractor.getIntValue( object2 );
+ }
+
public String toString() {
return "Integer <=";
}
@@ -221,6 +245,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getIntValue( right ) > ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getIntValue( object1 ) > extractor.getIntValue( object2 );
+ }
+
public String toString() {
return "Integer >";
}
@@ -254,6 +284,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getIntValue( right ) >= ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getIntValue( object1 ) >= extractor.getIntValue( object2 );
+ }
+
public String toString() {
return "Integer >=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/LongFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/LongFactory.java
index e612d57b527..b48381f021b 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/LongFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/LongFactory.java
@@ -88,6 +88,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((LongVariableContextEntry) context).left == context.extractor.getLongValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getLongValue( object1 ) == extractor.getLongValue( object2 );
+ }
+
public String toString() {
return "Long ==";
}
@@ -121,6 +127,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((LongVariableContextEntry) context).left != context.extractor.getLongValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getLongValue( object1 ) != extractor.getLongValue( object2 );
+ }
+
public String toString() {
return "Long !=";
}
@@ -154,6 +166,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getLongValue( right ) < ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getLongValue( object1 ) < extractor.getLongValue( object2 );
+ }
+
public String toString() {
return "Long <";
}
@@ -187,6 +205,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getLongValue( right ) <= ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getLongValue( object1 ) <= extractor.getLongValue( object2 );
+ }
+
public String toString() {
return "Long <=";
}
@@ -220,6 +244,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getLongValue( right ) > ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getLongValue( object1 ) > extractor.getLongValue( object2 );
+ }
+
public String toString() {
return "Long >";
}
@@ -253,6 +283,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getLongValue( right ) >= ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getLongValue( object1 ) >= extractor.getLongValue( object2 );
+ }
+
public String toString() {
return "Long >=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/ObjectFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/ObjectFactory.java
index a387a32554d..93b11b97dc2 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/ObjectFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/ObjectFactory.java
@@ -118,6 +118,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.equals( value2 );
+ }
+
public String toString() {
return "Object ==";
}
@@ -165,6 +176,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return !((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 != null;
+ }
+ return !value1.equals( value2 );
+ }
+
public String toString() {
return "Object !=";
}
@@ -198,6 +220,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) < 0;
}
+ public boolean evaluate(final Extractor extractor,
+ final Object object1,
+ final Object object2) {
+ final Comparable comp = (Comparable) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) < 0;
+ }
+
public String toString() {
return "Object <";
}
@@ -234,6 +263,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) <= 0;
}
+ public boolean evaluate(final Extractor extractor,
+ final Object object1,
+ final Object object2) {
+ final Comparable comp = (Comparable) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) <= 0;
+ }
+
public String toString() {
return "Object <=";
}
@@ -270,6 +306,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) > 0;
}
+ public boolean evaluate(final Extractor extractor,
+ final Object object1,
+ final Object object2) {
+ final Comparable comp = (Comparable) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) > 0;
+ }
+
public String toString() {
return "Object >";
}
@@ -306,6 +349,13 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return comp.compareTo( ((ObjectVariableContextEntry) context).left ) >= 0;
}
+ public boolean evaluate(final Extractor extractor,
+ final Object object1,
+ final Object object2) {
+ final Comparable comp = (Comparable) extractor.getValue( object1 );
+ return comp.compareTo( extractor.getValue( object2 ) ) >= 0;
+ }
+
public String toString() {
return "Object >=";
}
@@ -345,6 +395,14 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return col.contains( value );
}
+ public boolean evaluate(final Extractor extractor,
+ final Object object1,
+ final Object object2) {
+ final Object value = extractor.getValue( object2 );
+ final Collection col = (Collection) extractor.getValue( object1 );
+ return col.contains( value );
+ }
+
public String toString() {
return "Object contains";
}
@@ -384,6 +442,14 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return !col.contains( value );
}
+ public boolean evaluate(final Extractor extractor,
+ final Object object1,
+ final Object object2) {
+ final Object value = extractor.getValue( object2 );
+ final Collection col = (Collection) extractor.getValue( object1 );
+ return !col.contains( value );
+ }
+
public String toString() {
return "Object excludes";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/ShortFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/ShortFactory.java
index 349ac21ad6f..8cf3857c0ee 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/ShortFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/ShortFactory.java
@@ -88,6 +88,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((LongVariableContextEntry) context).left == context.extractor.getShortValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getShortValue( object1 ) == extractor.getShortValue( object2 );
+ }
+
public String toString() {
return "Short ==";
}
@@ -121,6 +127,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((LongVariableContextEntry) context).left != context.extractor.getShortValue( right );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getShortValue( object1 ) != extractor.getShortValue( object2 );
+ }
+
public String toString() {
return "Short !=";
}
@@ -154,6 +166,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getShortValue( right ) < ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getShortValue( object1 ) < extractor.getShortValue( object2 );
+ }
+
public String toString() {
return "Short <";
}
@@ -187,6 +205,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getShortValue( right ) <= ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getShortValue( object1 ) <= extractor.getShortValue( object2 );
+ }
+
public String toString() {
return "Boolean <=";
}
@@ -220,6 +244,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getShortValue( right ) > ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getShortValue( object1 ) > extractor.getShortValue( object2 );
+ }
+
public String toString() {
return "Short >";
}
@@ -253,6 +283,12 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return context.extractor.getShortValue( right ) >= ((LongVariableContextEntry) context).left;
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ return extractor.getShortValue( object1 ) >= extractor.getShortValue( object2 );
+ }
+
public String toString() {
return "Short >=";
}
diff --git a/drools-core/src/main/java/org/drools/base/evaluators/StringFactory.java b/drools-core/src/main/java/org/drools/base/evaluators/StringFactory.java
index 200b416fe05..1c0828f1321 100644
--- a/drools-core/src/main/java/org/drools/base/evaluators/StringFactory.java
+++ b/drools-core/src/main/java/org/drools/base/evaluators/StringFactory.java
@@ -102,9 +102,21 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return ((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 == null;
+ }
+ return value1.equals( value2 );
+ }
+
public String toString() {
return "String ==";
}
+
}
static class StringNotEqualEvaluator extends BaseEvaluator {
@@ -148,6 +160,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return !((ObjectVariableContextEntry) context).left.equals( value );
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return value2 != null;
+ }
+ return !value1.equals( value2 );
+ }
+
public String toString() {
return "String !=";
}
@@ -194,6 +217,17 @@ public boolean evaluateCachedLeft(final VariableContextEntry context,
return value.matches( (String) ((ObjectVariableContextEntry) context).left);
}
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2) {
+ final Object value1 = extractor.getValue( object1 );
+ final Object value2 = extractor.getValue( object2 );
+ if ( value1 == null ) {
+ return false;
+ }
+ return ((String) value1).matches( (String) value2 );
+ }
+
public String toString() {
return "String !=";
}
diff --git a/drools-core/src/main/java/org/drools/common/DefaultBetaConstraints.java b/drools-core/src/main/java/org/drools/common/DefaultBetaConstraints.java
index f96650b940f..03cfde7a75d 100644
--- a/drools-core/src/main/java/org/drools/common/DefaultBetaConstraints.java
+++ b/drools-core/src/main/java/org/drools/common/DefaultBetaConstraints.java
@@ -185,7 +185,8 @@ public BetaMemory createBetaMemory() {
final Constraint constraint = (Constraint) entry.getObject();
final VariableConstraint variableConstraint = (VariableConstraint) constraint;
final FieldIndex index = new FieldIndex( variableConstraint.getFieldExtractor(),
- variableConstraint.getRequiredDeclarations()[0] );
+ variableConstraint.getRequiredDeclarations()[0],
+ variableConstraint.getEvaluator());
list.add( index );
entry = (LinkedListEntry) entry.getNext();
}
diff --git a/drools-core/src/main/java/org/drools/spi/Evaluator.java b/drools-core/src/main/java/org/drools/spi/Evaluator.java
index 898aaa03d3e..f4c188a3ac1 100644
--- a/drools-core/src/main/java/org/drools/spi/Evaluator.java
+++ b/drools-core/src/main/java/org/drools/spi/Evaluator.java
@@ -46,6 +46,10 @@ public interface Evaluator
public boolean evaluate(Extractor extractor,
Object object1,
FieldValue value);
+
+ public boolean evaluate(Extractor extractor,
+ Object object1,
+ Object object2);
public boolean evaluateCachedLeft(VariableContextEntry context,
Object object1);
diff --git a/drools-core/src/main/java/org/drools/util/AbstractHashTable.java b/drools-core/src/main/java/org/drools/util/AbstractHashTable.java
index a815e2ab06d..de1c42baedb 100644
--- a/drools-core/src/main/java/org/drools/util/AbstractHashTable.java
+++ b/drools-core/src/main/java/org/drools/util/AbstractHashTable.java
@@ -8,6 +8,7 @@
import org.drools.common.InternalFactHandle;
import org.drools.reteoo.ReteTuple;
import org.drools.rule.Declaration;
+import org.drools.spi.Evaluator;
import org.drools.spi.FieldExtractor;
public abstract class AbstractHashTable
@@ -72,7 +73,7 @@ protected void resize(final int newCapacity) {
next = entry.getNext();
final int index = indexOf( entry.hashCode(),
- newTable.length );
+ newTable.length );
entry.setNext( newTable[index] );
newTable[index] = entry;
@@ -179,7 +180,9 @@ protected int indexOf(final int hashCode,
public abstract Entry getBucket(Object object);
- public interface ObjectComparator extends Serializable {
+ public interface ObjectComparator
+ extends
+ Serializable {
public int hashCodeOf(Object object);
public int rehash(int hashCode);
@@ -242,6 +245,8 @@ public void reset() {
public static class InstanceEquals
implements
ObjectComparator {
+
+ private static final long serialVersionUID = 1835792402650440794L;
public static ObjectComparator INSTANCE = new InstanceEquals();
public static ObjectComparator getInstance() {
@@ -273,6 +278,8 @@ public boolean equal(final Object object1,
public static class EqualityEquals
implements
ObjectComparator {
+
+ private static final long serialVersionUID = 8004812231695147987L;
public static ObjectComparator INSTANCE = new EqualityEquals();
public static ObjectComparator getInstance() {
@@ -297,9 +304,9 @@ private EqualityEquals() {
public boolean equal(final Object object1,
final Object object2) {
- if ( object1 == null ) {
- return object2 == null;
- }
+ if ( object1 == null ) {
+ return object2 == null;
+ }
return object1.equals( object2 );
}
}
@@ -307,6 +314,9 @@ public boolean equal(final Object object1,
public static class FactEntry
implements
Entry {
+
+ private static final long serialVersionUID = 1776798977330980128L;
+
public InternalFactHandle handle;
public int hashCode;
@@ -356,13 +366,15 @@ public boolean equals(final Object object) {
return (object == this) || (this.handle == ((FactEntry) object).handle);
}
}
-
+
public static class FieldIndex {
public FieldExtractor extractor;
public Declaration declaration;
+ public Evaluator evaluator;
public FieldIndex(final FieldExtractor extractor,
- final Declaration declaration) {
+ final Declaration declaration,
+ final Evaluator evaluator) {
super();
this.extractor = extractor;
this.declaration = declaration;
@@ -375,8 +387,12 @@ public Declaration getDeclaration() {
public FieldExtractor getExtractor() {
return this.extractor;
}
+
+ public Evaluator getEvaluator() {
+ return this.evaluator;
+ }
}
-
+
public static interface Index {
public int hashCodeOf(ReteTuple tuple);
@@ -387,200 +403,176 @@ public boolean equal(Object object,
public boolean equal(ReteTuple tuple1,
ReteTuple tuple2);
-
+
public boolean equal(Object object1,
- Object object2);
+ Object object2);
}
public static class SingleIndex
implements
Index {
- private FieldExtractor extractor;
- private Declaration declaration;
- private int startResult;
+ private FieldExtractor extractor;
+ private Declaration declaration;
+ private Evaluator evaluator;
- private ObjectComparator comparator;
+ private int startResult;
public SingleIndex(final FieldIndex[] indexes,
- final int startResult,
- final ObjectComparator comparator) {
+ final int startResult) {
this.startResult = startResult;
this.extractor = indexes[0].extractor;
this.declaration = indexes[0].declaration;
+ this.evaluator = indexes[0].evaluator;
- this.comparator = comparator;
}
public int hashCodeOf(final Object object) {
int hashCode = this.startResult;
hashCode = TupleIndexHashTable.PRIME * hashCode + this.extractor.getHashCode( object );
- return this.comparator.rehash( hashCode );
+ return rehash( hashCode );
}
public int hashCodeOf(final ReteTuple tuple) {
int hashCode = this.startResult;
hashCode = TupleIndexHashTable.PRIME * hashCode + this.declaration.getHashCode( tuple.get( this.declaration ).getObject() );
- return this.comparator.rehash( hashCode );
+ return rehash( hashCode );
}
public boolean equal(final Object object1,
final ReteTuple tuple) {
- final Object value1 = this.extractor.getValue( object1 );
- final Object value2 = this.declaration.getValue( tuple.get( this.declaration ).getObject() );
+ final Object object2 = tuple.get( this.declaration ).getObject();
- return this.comparator.equal( value1,
- value2 );
+ return this.evaluator.evaluate( this.extractor,
+ object1,
+ object2 );
}
-
+
public boolean equal(final Object object1,
final Object object2) {
- final Object value1 = this.extractor.getValue( object1 );
- final Object value2 = this.extractor.getValue( object2 );
-
- return this.comparator.equal( value1,
- value2 );
- }
+ return this.evaluator.evaluate( this.extractor,
+ object1,
+ object2 );
+ }
public boolean equal(final ReteTuple tuple1,
final ReteTuple tuple2) {
- final Object value1 = this.declaration.getValue( tuple1.get( this.declaration ).getObject() );
- final Object value2 = this.declaration.getValue( tuple2.get( this.declaration ).getObject() );
- return this.comparator.equal( value1,
- value2 );
+ final Object object1 = tuple1.get( this.declaration ).getObject();
+ final Object object2 = tuple2.get( this.declaration ).getObject();
+ return this.evaluator.evaluate( this.extractor,
+ object1,
+ object2 );
}
+
+ public int rehash(int h) {
+ h += ~(h << 9);
+ h ^= (h >>> 14);
+ h += (h << 4);
+ h ^= (h >>> 10);
+ return h;
+ }
+
}
public static class DoubleCompositeIndex
implements
Index {
- private FieldIndex index0;
- private FieldIndex index1;
-
- private int startResult;
+ private FieldIndex index0;
+ private FieldIndex index1;
- private ObjectComparator comparator;
+ private int startResult;
public DoubleCompositeIndex(final FieldIndex[] indexes,
- final int startResult,
- final ObjectComparator comparator) {
+ final int startResult) {
this.startResult = startResult;
this.index0 = indexes[0];
this.index1 = indexes[1];
- this.comparator = comparator;
}
public int hashCodeOf(final Object object) {
int hashCode = this.startResult;
- int hash = this.index0.extractor.getHashCode( object );
- hashCode = TupleIndexHashTable.PRIME * hashCode + hash;
-
- hash = this.index1.extractor.getHashCode( object );
- hashCode = TupleIndexHashTable.PRIME * hashCode + hash;
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.index0.extractor.getHashCode( object );
+ hashCode = TupleIndexHashTable.PRIME * hashCode + this.index1.extractor.getHashCode( object );
- return this.comparator.rehash( hashCode );
+ return rehash( hashCode );
}
public int hashCodeOf(final ReteTuple tuple) {
int hashCode = this.startResult;
hashCode = TupleIndexHashTable.PRIME * hashCode + this.index0.declaration.getHashCode( tuple.get( this.index0.declaration ).getObject() );
-
hashCode = TupleIndexHashTable.PRIME * hashCode + this.index1.declaration.getHashCode( tuple.get( this.index1.declaration ).getObject() );
- return this.comparator.rehash( hashCode );
+ return rehash( hashCode );
}
public boolean equal(final Object object1,
final ReteTuple tuple) {
- Object value1 = this.index0.extractor.getValue( object1 );
- Object value2 = this.index0.declaration.getValue( tuple.get( this.index0.declaration ).getObject() );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
-
- value1 = this.index1.extractor.getValue( object1 );
- value2 = this.index1.declaration.getValue( tuple.get( this.index1.declaration ).getObject() );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
+ Object object12 = tuple.get( this.index0.declaration ).getObject();
+ Object object22 = tuple.get( this.index1.declaration ).getObject();
- return true;
+ return this.index0.evaluator.evaluate( this.index0.extractor,
+ object1,
+ object12 ) && this.index1.evaluator.evaluate( this.index1.extractor,
+ object1,
+ object22 );
}
public boolean equal(final ReteTuple tuple1,
final ReteTuple tuple2) {
- Object value1 = this.index0.declaration.getValue( tuple1.get( this.index0.declaration ).getObject() );
- Object value2 = this.index0.declaration.getValue( tuple2.get( this.index0.declaration ).getObject() );
+ Object object11 = tuple1.get( this.index0.declaration ).getObject();
+ Object object12 = tuple2.get( this.index0.declaration ).getObject();
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
-
- value1 = this.index1.declaration.getValue( tuple1.get( this.index1.declaration ).getObject() );
- value2 = this.index1.declaration.getValue( tuple2.get( this.index1.declaration ).getObject() );
+ Object object21 = tuple1.get( this.index1.declaration ).getObject();
+ Object object22 = tuple2.get( this.index1.declaration ).getObject();
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
-
- return true;
+ return this.index0.evaluator.evaluate( this.index0.extractor,
+ object11,
+ object12 ) && this.index1.evaluator.evaluate( this.index1.extractor,
+ object21,
+ object22 );
}
-
+
public boolean equal(final Object object1,
final Object object2) {
- Object value1 = this.index0.extractor.getValue( object1 );
- Object value2 = this.index0.extractor.getValue( object2 );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
-
- value1 = this.index1.extractor.getValue( object1 );
- value2 = this.index1.extractor.getValue( object2 );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
+ return this.index0.evaluator.evaluate( this.index0.extractor,
+ object1,
+ object2 ) && this.index1.evaluator.evaluate( this.index1.extractor,
+ object1,
+ object2 );
+ }
- return true;
- }
+ public int rehash(int h) {
+ h += ~(h << 9);
+ h ^= (h >>> 14);
+ h += (h << 4);
+ h ^= (h >>> 10);
+ return h;
+ }
}
public static class TripleCompositeIndex
implements
Index {
- private FieldIndex index0;
- private FieldIndex index1;
- private FieldIndex index2;
+ private FieldIndex index0;
+ private FieldIndex index1;
+ private FieldIndex index2;
private int startResult;
- private ObjectComparator comparator;
-
public TripleCompositeIndex(final FieldIndex[] indexes,
- final int startResult,
- final ObjectComparator comparator) {
+ final int startResult) {
this.startResult = startResult;
this.index0 = indexes[0];
this.index1 = indexes[1];
this.index2 = indexes[2];
- this.comparator = comparator;
}
public int hashCodeOf(final Object object) {
@@ -590,7 +582,7 @@ public int hashCodeOf(final Object object) {
hashCode = TupleIndexHashTable.PRIME * hashCode + this.index1.extractor.getHashCode( object );;
hashCode = TupleIndexHashTable.PRIME * hashCode + this.index2.extractor.getHashCode( object );;
- return this.comparator.rehash( hashCode );
+ return rehash( hashCode );
}
public int hashCodeOf(final ReteTuple tuple) {
@@ -600,94 +592,60 @@ public int hashCodeOf(final ReteTuple tuple) {
hashCode = TupleIndexHashTable.PRIME * hashCode + this.index1.declaration.getHashCode( tuple.get( this.index1.declaration ).getObject() );
hashCode = TupleIndexHashTable.PRIME * hashCode + this.index2.declaration.getHashCode( tuple.get( this.index2.declaration ).getObject() );
- return this.comparator.rehash( hashCode );
+ return rehash( hashCode );
}
public boolean equal(final Object object1,
final ReteTuple tuple) {
- Object value1 = this.index0.extractor.getValue( object1 );
- Object value2 = this.index0.declaration.getValue( tuple.get( this.index0.declaration ).getObject() );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
+ Object object12 = tuple.get( this.index0.declaration ).getObject();
+ Object object22 = tuple.get( this.index1.declaration ).getObject();
+ Object object32 = tuple.get( this.index2.declaration ).getObject();
- value1 = this.index1.extractor.getValue( object1 );
- value2 = this.index1.declaration.getValue( tuple.get( this.index1.declaration ).getObject() );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
-
- value1 = this.index2.extractor.getValue( object1 );
- value2 = this.index2.declaration.getValue( tuple.get( this.index2.declaration ).getObject() );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
-
- return true;
+ return this.index0.evaluator.evaluate( this.index0.extractor,
+ object1,
+ object12 ) && this.index1.evaluator.evaluate( this.index1.extractor,
+ object1,
+ object22 ) && this.index2.evaluator.evaluate( this.index2.extractor,
+ object1,
+ object32 );
}
public boolean equal(final ReteTuple tuple1,
final ReteTuple tuple2) {
- Object value1 = this.index0.declaration.getValue( tuple1.get( this.index0.declaration ).getObject() );
- Object value2 = this.index0.declaration.getValue( tuple2.get( this.index0.declaration ).getObject() );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
-
- value1 = this.index1.declaration.getValue( tuple1.get( this.index1.declaration ).getObject() );
- value2 = this.index1.declaration.getValue( tuple2.get( this.index1.declaration ).getObject() );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
-
- value1 = this.index2.declaration.getValue( tuple1.get( this.index2.declaration ).getObject() );
- value2 = this.index2.declaration.getValue( tuple2.get( this.index2.declaration ).getObject() );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
-
- return true;
+ Object object11 = tuple1.get( this.index0.declaration ).getObject();
+ Object object12 = tuple2.get( this.index0.declaration ).getObject();
+ Object object21 = tuple1.get( this.index1.declaration ).getObject();
+ Object object22 = tuple2.get( this.index1.declaration ).getObject();
+ Object object31 = tuple1.get( this.index2.declaration ).getObject();
+ Object object32 = tuple2.get( this.index2.declaration ).getObject();
+
+ return this.index0.evaluator.evaluate( this.index0.extractor,
+ object11,
+ object12 ) && this.index1.evaluator.evaluate( this.index1.extractor,
+ object21,
+ object22 ) && this.index2.evaluator.evaluate( this.index2.extractor,
+ object31,
+ object32 );
}
-
+
public boolean equal(final Object object1,
final Object object2) {
- Object value1 = this.index0.extractor.getValue( object1 );
- Object value2 = this.index0.extractor.getValue( object2 );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
-
- value1 = this.index1.extractor.getValue( object1 );
- value2 = this.index1.extractor.getValue( object2 );
-
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
-
- value1 = this.index2.extractor.getValue( object1 );
- value2 = this.index2.extractor.getValue( object2 );
+ return this.index0.evaluator.evaluate( this.index0.extractor,
+ object1,
+ object2 ) && this.index1.evaluator.evaluate( this.index1.extractor,
+ object1,
+ object2 ) && this.index2.evaluator.evaluate( this.index2.extractor,
+ object1,
+ object2 );
+ }
- if ( !this.comparator.equal( value1,
- value2 ) ) {
- return false;
- }
+ public int rehash(int h) {
+ h += ~(h << 9);
+ h ^= (h >>> 14);
+ h += (h << 4);
+ h ^= (h >>> 10);
+ return h;
+ }
- return true;
- }
- }
+ }
}
\ No newline at end of file
diff --git a/drools-core/src/main/java/org/drools/util/FactHandleIndexHashTable.java b/drools-core/src/main/java/org/drools/util/FactHandleIndexHashTable.java
index a182bd7a6be..828d7e65c60 100644
--- a/drools-core/src/main/java/org/drools/util/FactHandleIndexHashTable.java
+++ b/drools-core/src/main/java/org/drools/util/FactHandleIndexHashTable.java
@@ -6,13 +6,13 @@
import org.drools.common.InternalFactHandle;
import org.drools.reteoo.FactHandleMemory;
import org.drools.reteoo.ReteTuple;
-import org.drools.rule.Declaration;
-import org.drools.spi.FieldExtractor;
-import org.drools.util.ObjectHashMap.ObjectEntry;
public class FactHandleIndexHashTable extends AbstractHashTable
implements
FactHandleMemory {
+
+ private static final long serialVersionUID = -6033183838054653227L;
+
public static final int PRIME = 31;
private int startResult;
@@ -37,7 +37,7 @@ public FactHandleIndexHashTable(final int capacity,
this.startResult = FactHandleIndexHashTable.PRIME;
for ( int i = 0, length = index.length; i < length; i++ ) {
- this.startResult += FactHandleIndexHashTable.PRIME * this.startResult + index[i].getExtractor().getIndex();
+ this.startResult = FactHandleIndexHashTable.PRIME * this.startResult + index[i].getExtractor().getIndex();
}
switch ( index.length ) {
@@ -45,18 +45,15 @@ public FactHandleIndexHashTable(final int capacity,
throw new IllegalArgumentException( "FieldIndexHashTable cannot use an index[] of length 0" );
case 1 :
this.index = new SingleIndex( index,
- this.startResult,
- this.comparator );
+ this.startResult);
break;
case 2 :
this.index = new DoubleCompositeIndex( index,
- this.startResult,
- this.comparator );
+ this.startResult );
break;
case 3 :
this.index = new TripleCompositeIndex( index,
- this.startResult,
- this.comparator );
+ this.startResult );
break;
default :
throw new IllegalArgumentException( "FieldIndexHashTable cannot use an index[] of length great than 3" );
@@ -250,6 +247,8 @@ public int size() {
public static class FieldIndexEntry
implements
Entry {
+
+ private static final long serialVersionUID = -577270475161063671L;
private Entry next;
private FactEntry first;
private final int hashCode;
diff --git a/drools-core/src/main/java/org/drools/util/TupleIndexHashTable.java b/drools-core/src/main/java/org/drools/util/TupleIndexHashTable.java
index 79353043490..c78960acaf1 100644
--- a/drools-core/src/main/java/org/drools/util/TupleIndexHashTable.java
+++ b/drools-core/src/main/java/org/drools/util/TupleIndexHashTable.java
@@ -4,17 +4,15 @@
package org.drools.util;
import org.drools.common.InternalFactHandle;
-import org.drools.reteoo.FactHandleMemory;
import org.drools.reteoo.ReteTuple;
import org.drools.reteoo.TupleMemory;
-import org.drools.rule.Column;
-import org.drools.rule.Declaration;
-import org.drools.spi.FieldExtractor;
-import org.drools.util.ObjectHashMap.ObjectEntry;
public class TupleIndexHashTable extends AbstractHashTable
implements
TupleMemory {
+
+ private static final long serialVersionUID = -6214772340195061306L;
+
public static final int PRIME = 31;
private int startResult;
@@ -47,18 +45,15 @@ public TupleIndexHashTable(final int capacity,
throw new IllegalArgumentException( "FieldIndexHashTable cannot use an index[] of length 0" );
case 1 :
this.index = new SingleIndex( index,
- this.startResult,
- this.comparator );
+ this.startResult);
break;
case 2 :
this.index = new DoubleCompositeIndex( index,
- this.startResult,
- this.comparator );
+ this.startResult);
break;
case 3 :
this.index = new TripleCompositeIndex( index,
- this.startResult,
- this.comparator );
+ this.startResult);
break;
default :
throw new IllegalArgumentException( "FieldIndexHashTable cannot use an index[] of length great than 3" );
@@ -243,6 +238,8 @@ public int size() {
public static class FieldIndexEntry
implements
Entry {
+
+ private static final long serialVersionUID = 8160842495541574574L;
private Entry next;
private ReteTuple first;
private final int hashCode;
|
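Note on the drools indexing commit above: it adds a two-object evaluate(Extractor, Object, Object) overload to the Evaluator interface and rewires the index classes (SingleIndex, DoubleCompositeIndex, TripleCompositeIndex) to call that overload instead of extracting both values and handing them to an ObjectComparator. A minimal standalone sketch of that pattern follows; Extractor, Evaluator, EqualEvaluator and SingleFieldIndex here are simplified stand-ins for illustration, not the actual Drools types.

// Sketch only: simplified stand-ins for the Drools Extractor/Evaluator contracts.
interface Extractor {
    Object getValue(Object fact);           // reads one field from a fact
}

interface Evaluator {
    // the two-object overload: compare the same field extracted from two facts
    boolean evaluate(Extractor extractor, Object object1, Object object2);
}

final class EqualEvaluator implements Evaluator {
    public boolean evaluate(Extractor extractor, Object object1, Object object2) {
        Object v1 = extractor.getValue(object1);
        Object v2 = extractor.getValue(object2);
        return v1 == null ? v2 == null : v1.equals(v2);
    }
}

// Mirrors SingleIndex.equal(Object, Object): the index delegates the comparison
// to the evaluator rather than to a shared ObjectComparator.
final class SingleFieldIndex {
    private final Extractor extractor;
    private final Evaluator evaluator;

    SingleFieldIndex(Extractor extractor, Evaluator evaluator) {
        this.extractor = extractor;
        this.evaluator = evaluator;
    }

    boolean sameBucket(Object fact1, Object fact2) {
        return evaluator.evaluate(extractor, fact1, fact2);
    }
}

The composite indexes in the diff then simply AND one such evaluate call per FieldIndex, which is what the rewritten DoubleCompositeIndex and TripleCompositeIndex equal methods do.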
f79cf4149fe680ff36117277ffb45d4f2e53c33d
|
elasticsearch
|
remove unused code--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/gateway/GatewayService.java b/src/main/java/org/elasticsearch/gateway/GatewayService.java
index 75b7f9e1aa857..4768333193041 100644
--- a/src/main/java/org/elasticsearch/gateway/GatewayService.java
+++ b/src/main/java/org/elasticsearch/gateway/GatewayService.java
@@ -24,7 +24,10 @@
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.block.ClusterBlocks;
-import org.elasticsearch.cluster.metadata.*;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.cluster.metadata.MetaDataStateIndexService;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.RoutingTable;
@@ -62,8 +65,6 @@ public class GatewayService extends AbstractLifecycleComponent<GatewayService> i
private final DiscoveryService discoveryService;
- private final MetaDataCreateIndexService createIndexService;
-
private final TimeValue recoverAfterTime;
private final int recoverAfterNodes;
private final int expectedNodes;
@@ -77,13 +78,12 @@ public class GatewayService extends AbstractLifecycleComponent<GatewayService> i
private final AtomicBoolean scheduledRecovery = new AtomicBoolean();
@Inject
- public GatewayService(Settings settings, Gateway gateway, AllocationService allocationService, ClusterService clusterService, DiscoveryService discoveryService, MetaDataCreateIndexService createIndexService, ThreadPool threadPool) {
+ public GatewayService(Settings settings, Gateway gateway, AllocationService allocationService, ClusterService clusterService, DiscoveryService discoveryService, ThreadPool threadPool) {
super(settings);
this.gateway = gateway;
this.allocationService = allocationService;
this.clusterService = clusterService;
this.discoveryService = discoveryService;
- this.createIndexService = createIndexService;
this.threadPool = threadPool;
// allow to control a delay of when indices will get created
this.recoverAfterTime = componentSettings.getAsTime("recover_after_time", null);
|
d0994540cd94cb3803dba57cb127727d4b8a67ff
|
intellij-community
|
Don't offer to download source by maven for artifact with 'system' scope--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/MavenAttachSourcesProvider.java b/plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/MavenAttachSourcesProvider.java
index ba43f06034739..3b1a24863349f 100644
--- a/plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/MavenAttachSourcesProvider.java
+++ b/plugins/maven/src/main/java/org/jetbrains/idea/maven/utils/MavenAttachSourcesProvider.java
@@ -118,7 +118,9 @@ private static Collection<MavenArtifact> findArtifacts(Collection<MavenProject>
for (MavenProject each : mavenProjects) {
for (LibraryOrderEntry entry : orderEntries) {
final MavenArtifact artifact = MavenRootModelAdapter.findArtifact(each, entry.getLibrary());
- if (artifact != null) artifacts.add(artifact);
+ if (artifact != null && !"system".equals(artifact.getScope())) {
+ artifacts.add(artifact);
+ }
}
}
return artifacts;
|
d8fdf6abed4031adfa24a35ab23b78185cad6fc1
|
drools
|
[DROOLS-991] allow to configure maxEventsInMemory in FileLogger--
|
a
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/KieLoggersTest.java b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/KieLoggersTest.java
index 16a433a6708..be9896a32e3 100644
--- a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/KieLoggersTest.java
+++ b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/KieLoggersTest.java
@@ -15,12 +15,6 @@
package org.drools.compiler.integrationtests;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-
import org.drools.compiler.Message;
import org.junit.Test;
import org.kie.api.KieServices;
@@ -31,12 +25,16 @@
import org.kie.api.builder.model.KieSessionModel;
import org.kie.api.event.rule.AfterMatchFiredEvent;
import org.kie.api.event.rule.AgendaEventListener;
-import org.kie.api.runtime.KieContainer;
-import org.kie.internal.io.ResourceFactory;
import org.kie.api.io.Resource;
import org.kie.api.logger.KieRuntimeLogger;
+import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.StatelessKieSession;
+import org.kie.internal.io.ResourceFactory;
+
+import java.io.File;
+
+import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
public class KieLoggersTest {
@@ -186,6 +184,45 @@ public void testKieFileLogger() throws Exception {
file = new File( fileName+".log" );
assertTrue( file.exists() );
+ assertTrue( file.length() > 0 );
+ file.delete();
+ }
+
+ @Test
+ public void testKieFileLoggerWithImmediateFlushing() throws Exception {
+ // DROOLS-991
+ String drl = "package org.drools.integrationtests\n" +
+ "import org.drools.compiler.Message;\n" +
+ "rule \"Hello World\"\n" +
+ " when\n" +
+ " m : Message( myMessage : message )\n" +
+ " then\n" +
+ "end";
+ // get the resource
+ Resource dt = ResourceFactory.newByteArrayResource(drl.getBytes()).setTargetPath( "org/drools/integrationtests/hello.drl" );
+
+ // create the builder
+ KieSession ksession = getKieSession(dt);
+
+ String fileName = "testKieFileLogger";
+ File file = new File(fileName+".log");
+ if( file.exists() ) {
+ file.delete();
+ }
+
+ // Setting maxEventsInMemory to 0 makes all events to be immediately flushed to the file
+ KieRuntimeLogger logger = KieServices.Factory.get().getLoggers().newFileLogger( ksession, fileName, 0 );
+
+ ksession.insert(new Message("Hello World"));
+ int fired = ksession.fireAllRules();
+ assertEquals( 1, fired );
+
+ // check that the file has been populated before closing it
+ file = new File( fileName+".log" );
+ assertTrue( file.exists() );
+ assertTrue( file.length() > 0 );
+
+ logger.close();
file.delete();
}
diff --git a/drools-core/src/main/java/org/drools/core/audit/KnowledgeRuntimeLoggerProviderImpl.java b/drools-core/src/main/java/org/drools/core/audit/KnowledgeRuntimeLoggerProviderImpl.java
index 99fc0618ba4..70b2b6036f8 100644
--- a/drools-core/src/main/java/org/drools/core/audit/KnowledgeRuntimeLoggerProviderImpl.java
+++ b/drools-core/src/main/java/org/drools/core/audit/KnowledgeRuntimeLoggerProviderImpl.java
@@ -31,7 +31,14 @@ public class KnowledgeRuntimeLoggerProviderImpl
public KnowledgeRuntimeLogger newFileLogger(KieRuntimeEventManager session,
String fileName) {
+ return newFileLogger(session, fileName, WorkingMemoryFileLogger.DEFAULT_MAX_EVENTS_IN_MEMORY);
+ }
+
+ public KnowledgeRuntimeLogger newFileLogger(KieRuntimeEventManager session,
+ String fileName,
+ int maxEventsInMemory) {
WorkingMemoryFileLogger logger = new WorkingMemoryFileLogger( (KnowledgeRuntimeEventManager) session );
+ logger.setMaxEventsInMemory( maxEventsInMemory );
if ( fileName != null ) {
logger.setFileName(fileName);
}
diff --git a/drools-core/src/main/java/org/drools/core/audit/WorkingMemoryFileLogger.java b/drools-core/src/main/java/org/drools/core/audit/WorkingMemoryFileLogger.java
index 92a839e8d4d..493c090e2bf 100644
--- a/drools-core/src/main/java/org/drools/core/audit/WorkingMemoryFileLogger.java
+++ b/drools-core/src/main/java/org/drools/core/audit/WorkingMemoryFileLogger.java
@@ -16,9 +16,16 @@
package org.drools.core.audit;
+import com.thoughtworks.xstream.XStream;
+import org.drools.core.WorkingMemory;
+import org.drools.core.audit.event.LogEvent;
+import org.drools.core.util.IoUtils;
+import org.kie.internal.event.KnowledgeRuntimeEventManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
-import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
@@ -27,19 +34,6 @@
import java.util.ArrayList;
import java.util.List;
-import org.drools.core.WorkingMemory;
-import org.drools.core.audit.event.LogEvent;
-import org.drools.core.util.IoUtils;
-import org.kie.api.event.rule.AgendaGroupPoppedEvent;
-import org.kie.api.event.rule.AgendaGroupPushedEvent;
-import org.kie.api.event.rule.RuleFlowGroupActivatedEvent;
-import org.kie.api.event.rule.RuleFlowGroupDeactivatedEvent;
-import org.kie.internal.event.KnowledgeRuntimeEventManager;
-
-import com.thoughtworks.xstream.XStream;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
/**
* A logger of events generated by a working memory. It stores its information
* in a file that can be specified. All the events logged are written to the
@@ -55,9 +49,11 @@ public class WorkingMemoryFileLogger extends WorkingMemoryLogger {
protected static final transient Logger logger = LoggerFactory.getLogger(WorkingMemoryFileLogger.class);
+ public static final int DEFAULT_MAX_EVENTS_IN_MEMORY = 1000;
+
private List<LogEvent> events = new ArrayList<LogEvent>();
private String fileName = "event";
- private int maxEventsInMemory = 1000;
+ private int maxEventsInMemory = DEFAULT_MAX_EVENTS_IN_MEMORY;
private int nbOfFile = 0;
private boolean split = true;
private boolean initialized = false;
|
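The new three-argument overload makes the in-memory event buffer configurable; per the test above, passing 0 flushes every audit event to the log file as soon as it is recorded. A usage sketch, with the session and file name taken as placeholders:

    KieRuntimeLogger logger =
            KieServices.Factory.get().getLoggers().newFileLogger(ksession, "audit", 0);
    try {
        ksession.insert(new Message("Hello World"));
        ksession.fireAllRules();
    } finally {
        logger.close();   // closing the logger also finishes writing the file
    }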
93ab055d17bf4663c439424a40a053d7b0255aa7
|
kotlin
|
Change Signature: Do not fail on unresolved PsiMethod. KT-9535 Fixed--
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/idea/src/org/jetbrains/kotlin/idea/refactoring/changeSignature/JetChangeSignatureUsageProcessor.java b/idea/src/org/jetbrains/kotlin/idea/refactoring/changeSignature/JetChangeSignatureUsageProcessor.java
index c566ef20f2a96..f6b990ffd6901 100644
--- a/idea/src/org/jetbrains/kotlin/idea/refactoring/changeSignature/JetChangeSignatureUsageProcessor.java
+++ b/idea/src/org/jetbrains/kotlin/idea/refactoring/changeSignature/JetChangeSignatureUsageProcessor.java
@@ -442,7 +442,7 @@ private static void findSAMUsages(ChangeInfo changeInfo, Set<UsageInfo> result)
if (((PsiMethod) method).getContainingClass() == null) return;
FunctionDescriptor methodDescriptor = JavaResolutionUtils.getJavaMethodDescriptor((PsiMethod) method);
- assert methodDescriptor != null;
+ if (methodDescriptor == null) return;
DeclarationDescriptor containingDescriptor = methodDescriptor.getContainingDeclaration();
if (!(containingDescriptor instanceof JavaClassDescriptor)) return;
|
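The fix above trades a hard assertion for a silent skip: getJavaMethodDescriptor() can return null for a PsiMethod that does not resolve, and in that case the SAM usage search has nothing to update. A minimal sketch of the pattern, names as in the diff:

    FunctionDescriptor methodDescriptor =
            JavaResolutionUtils.getJavaMethodDescriptor((PsiMethod) method);
    if (methodDescriptor == null) return;   // unresolved method: skip instead of failing the refactoring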
f64233fb2d9bcb77e9249d3bc497fd9d110e6a9f
|
ReactiveX-RxJava
|
Add Single.fromCallable()--
|
a
|
https://github.com/ReactiveX/RxJava
|
diff --git a/src/main/java/rx/Single.java b/src/main/java/rx/Single.java
index 4324d32acf..3701d93189 100644
--- a/src/main/java/rx/Single.java
+++ b/src/main/java/rx/Single.java
@@ -12,6 +12,7 @@
*/
package rx;
+import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
@@ -605,6 +606,43 @@ public final static <T> Single<T> from(Future<? extends T> future, Scheduler sch
return new Single<T>(OnSubscribeToObservableFuture.toObservableFuture(future)).subscribeOn(scheduler);
}
+ /**
+ * Returns a {@link Single} that invokes passed function and emits its result for each new Observer that subscribes.
+ * <p>
+ * Allows you to defer execution of passed function until Observer subscribes to the {@link Single}.
+ * It makes passed function "lazy".
+ * Result of the function invocation will be emitted by the {@link Single}.
+ * <dl>
+ * <dt><b>Scheduler:</b></dt>
+ * <dd>{@code fromCallable} does not operate by default on a particular {@link Scheduler}.</dd>
+ * </dl>
+ *
+ * @param func
+ * function which execution should be deferred, it will be invoked when Observer will subscribe to the {@link Single}.
+ * @param <T>
+ * the type of the item emitted by the {@link Single}.
+ * @return a {@link Single} whose {@link Observer}s' subscriptions trigger an invocation of the given function.
+ */
+ @Experimental
+ public static <T> Single<T> fromCallable(final Callable<? extends T> func) {
+ return create(new OnSubscribe<T>() {
+ @Override
+ public void call(SingleSubscriber<? super T> singleSubscriber) {
+ final T value;
+
+ try {
+ value = func.call();
+ } catch (Throwable t) {
+ Exceptions.throwIfFatal(t);
+ singleSubscriber.onError(t);
+ return;
+ }
+
+ singleSubscriber.onSuccess(value);
+ }
+ });
+ }
+
/**
* Returns a {@code Single} that emits a specified item.
* <p>
diff --git a/src/test/java/rx/SingleTest.java b/src/test/java/rx/SingleTest.java
index 7d8fe2dc22..f78151b094 100644
--- a/src/test/java/rx/SingleTest.java
+++ b/src/test/java/rx/SingleTest.java
@@ -20,8 +20,10 @@
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.when;
import java.util.Arrays;
+import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
@@ -530,4 +532,42 @@ public void doOnErrorShouldThrowCompositeExceptionIfOnErrorActionThrows() {
verify(action).call(error);
}
+
+ @Test
+ public void shouldEmitValueFromCallable() throws Exception {
+ Callable<String> callable = mock(Callable.class);
+
+ when(callable.call()).thenReturn("value");
+
+ TestSubscriber<String> testSubscriber = new TestSubscriber<String>();
+
+ Single
+ .fromCallable(callable)
+ .subscribe(testSubscriber);
+
+ testSubscriber.assertValue("value");
+ testSubscriber.assertNoErrors();
+
+ verify(callable).call();
+ }
+
+ @Test
+ public void shouldPassErrorFromCallable() throws Exception {
+ Callable<String> callable = mock(Callable.class);
+
+ Throwable error = new IllegalStateException();
+
+ when(callable.call()).thenThrow(error);
+
+ TestSubscriber<String> testSubscriber = new TestSubscriber<String>();
+
+ Single
+ .fromCallable(callable)
+ .subscribe(testSubscriber);
+
+ testSubscriber.assertNoValues();
+ testSubscriber.assertError(error);
+
+ verify(callable).call();
+ }
}
|
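A usage sketch for the new operator: the Callable runs lazily, once per subscription, and an exception thrown from call() is delivered through onError rather than propagated to the caller. The helper method below is a placeholder:

    Single<String> single = Single.fromCallable(new Callable<String>() {
        @Override
        public String call() throws Exception {
            return loadValueFromDisk();   // placeholder for some blocking work
        }
    });

    TestSubscriber<String> subscriber = new TestSubscriber<String>();
    single.subscribe(subscriber);
    subscriber.assertNoErrors();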
959b0d84b4ee762461d47324cd2d4d00a3e49c0f
|
ReactiveX-RxJava
|
made WINDOW_MAKER typed--
|
p
|
https://github.com/ReactiveX/RxJava
|
diff --git a/rxjava-core/src/main/java/rx/operators/OperationWindow.java b/rxjava-core/src/main/java/rx/operators/OperationWindow.java
index 0278fbb4d8..0a24937f3b 100644
--- a/rxjava-core/src/main/java/rx/operators/OperationWindow.java
+++ b/rxjava-core/src/main/java/rx/operators/OperationWindow.java
@@ -43,12 +43,14 @@
public final class OperationWindow extends ChunkedOperation {
- public static final Func0 WINDOW_MAKER = new Func0() {
- @Override
- public Object call() {
- return new Window();
- }
- };
+ public static <T> Func0<Window<T>> windowMaker() {
+ return new Func0<Window<T>>() {
+ @Override
+ public Window<T> call() {
+ return new Window<T>();
+ }
+ };
+ }
/**
* <p>This method creates a {@link rx.util.functions.Func1} object which represents the window operation. This operation takes
@@ -74,7 +76,7 @@ public static <T> OnSubscribeFunc<Observable<T>> window(final Observable<T> sour
return new OnSubscribeFunc<Observable<T>>() {
@Override
public Subscription onSubscribe(final Observer<? super Observable<T>> observer) {
- NonOverlappingChunks<T, Observable<T>> windows = new NonOverlappingChunks<T, Observable<T>>(observer, WINDOW_MAKER);
+ NonOverlappingChunks<T, Observable<T>> windows = new NonOverlappingChunks<T, Observable<T>>(observer, windowMaker());
ChunkCreator creator = new ObservableBasedSingleChunkCreator<T, Observable<T>>(windows, windowClosingSelector);
return source.subscribe(new ChunkObserver<T, Observable<T>>(windows, observer, creator));
}
@@ -111,7 +113,7 @@ public static <T> OnSubscribeFunc<Observable<T>> window(final Observable<T> sour
return new OnSubscribeFunc<Observable<T>>() {
@Override
public Subscription onSubscribe(final Observer<? super Observable<T>> observer) {
- OverlappingChunks<T, Observable<T>> windows = new OverlappingChunks<T, Observable<T>>(observer, WINDOW_MAKER);
+ OverlappingChunks<T, Observable<T>> windows = new OverlappingChunks<T, Observable<T>>(observer, windowMaker());
ChunkCreator creator = new ObservableBasedMultiChunkCreator<T, Observable<T>>(windows, windowOpenings, windowClosingSelector);
return source.subscribe(new ChunkObserver<T, Observable<T>>(windows, observer, creator));
}
@@ -166,7 +168,7 @@ public static <T> OnSubscribeFunc<Observable<T>> window(final Observable<T> sour
return new OnSubscribeFunc<Observable<T>>() {
@Override
public Subscription onSubscribe(final Observer<? super Observable<T>> observer) {
- Chunks<T, Observable<T>> chunks = new SizeBasedChunks<T, Observable<T>>(observer, WINDOW_MAKER, count);
+ Chunks<T, Observable<T>> chunks = new SizeBasedChunks<T, Observable<T>>(observer, windowMaker(), count);
ChunkCreator creator = new SkippingChunkCreator<T, Observable<T>>(chunks, skip);
return source.subscribe(new ChunkObserver<T, Observable<T>>(chunks, observer, creator));
}
@@ -221,7 +223,7 @@ public static <T> OnSubscribeFunc<Observable<T>> window(final Observable<T> sour
return new OnSubscribeFunc<Observable<T>>() {
@Override
public Subscription onSubscribe(final Observer<? super Observable<T>> observer) {
- NonOverlappingChunks<T, Observable<T>> windows = new NonOverlappingChunks<T, Observable<T>>(observer, WINDOW_MAKER);
+ NonOverlappingChunks<T, Observable<T>> windows = new NonOverlappingChunks<T, Observable<T>>(observer, windowMaker());
ChunkCreator creator = new TimeBasedChunkCreator<T, Observable<T>>(windows, timespan, unit, scheduler);
return source.subscribe(new ChunkObserver<T, Observable<T>>(windows, observer, creator));
}
@@ -282,7 +284,7 @@ public static <T> OnSubscribeFunc<Observable<T>> window(final Observable<T> sour
return new OnSubscribeFunc<Observable<T>>() {
@Override
public Subscription onSubscribe(final Observer<? super Observable<T>> observer) {
- Chunks<T, Observable<T>> chunks = new TimeAndSizeBasedChunks<T, Observable<T>>(observer, WINDOW_MAKER, count, timespan, unit, scheduler);
+ Chunks<T, Observable<T>> chunks = new TimeAndSizeBasedChunks<T, Observable<T>>(observer, windowMaker(), count, timespan, unit, scheduler);
ChunkCreator creator = new SingleChunkCreator<T, Observable<T>>(chunks);
return source.subscribe(new ChunkObserver<T, Observable<T>>(chunks, observer, creator));
}
@@ -343,7 +345,7 @@ public static <T> OnSubscribeFunc<Observable<T>> window(final Observable<T> sour
return new OnSubscribeFunc<Observable<T>>() {
@Override
public Subscription onSubscribe(final Observer<? super Observable<T>> observer) {
- OverlappingChunks<T, Observable<T>> windows = new TimeBasedChunks<T, Observable<T>>(observer, WINDOW_MAKER, timespan, unit, scheduler);
+ OverlappingChunks<T, Observable<T>> windows = new TimeBasedChunks<T, Observable<T>>(observer, windowMaker(), timespan, unit, scheduler);
ChunkCreator creator = new TimeBasedChunkCreator<T, Observable<T>>(windows, timeshift, unit, scheduler);
return source.subscribe(new ChunkObserver<T, Observable<T>>(windows, observer, creator));
}
@@ -373,7 +375,6 @@ public static class UnitTest {
private TestScheduler scheduler;
@Before
- @SuppressWarnings("unchecked")
public void before() {
scheduler = new TestScheduler();
}
|
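The point of the change above is that a static Func0 constant cannot carry a type parameter, so every use site was stuck with raw types and @SuppressWarnings("unchecked"); a generic factory method lets the compiler infer the window's element type per call site. A minimal sketch, assuming Window<T> is visible to the caller:

    // Sketch only: the call site now gets a typed factory instead of a raw constant.
    Func0<Window<String>> maker = OperationWindow.windowMaker();
    Window<String> window = maker.call();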
b32aa01a3f9294fc219e8e3b9f274607ebc7ff20
|
restlet-framework-java
|
Fixed code generation for the GAE edition.--
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/modules/org.restlet/src/org/restlet/engine/io/ByteUtils.java b/modules/org.restlet/src/org/restlet/engine/io/ByteUtils.java
index 65f15820e0..8b11da2da4 100644
--- a/modules/org.restlet/src/org/restlet/engine/io/ByteUtils.java
+++ b/modules/org.restlet/src/org/restlet/engine/io/ByteUtils.java
@@ -128,25 +128,37 @@ public static WritableByteChannel getChannel(OutputStream outputStream) {
*/
public static ReadableByteChannel getChannel(
final Representation representation) throws IOException {
- final Pipe pipe = Pipe.open();
- final Application application = Application.getCurrent();
-
- // Get a thread that will handle the task of continuously
- // writing the representation into the input side of the pipe
- application.getTaskService().execute(new Runnable() {
- public void run() {
- try {
- WritableByteChannel wbc = pipe.sink();
- representation.write(wbc);
- wbc.close();
- } catch (IOException ioe) {
- Context.getCurrentLogger().log(Level.FINE,
- "Error while writing to the piped channel.", ioe);
+ if (Edition.CURRENT != Edition.GAE) {
+ // [ifndef gae]
+ final Pipe pipe = Pipe.open();
+ final Application application = Application.getCurrent();
+
+ // Get a thread that will handle the task of continuously
+ // writing the representation into the input side of the pipe
+ application.getTaskService().execute(new Runnable() {
+ public void run() {
+ try {
+ WritableByteChannel wbc = pipe.sink();
+ representation.write(wbc);
+ wbc.close();
+ } catch (IOException ioe) {
+ Context.getCurrentLogger().log(Level.FINE,
+ "Error while writing to the piped channel.",
+ ioe);
+ }
}
- }
- });
+ });
- return pipe.source();
+ return pipe.source();
+ // [enddef]
+ } else {
+ Context
+ .getCurrentLogger()
+ .log(
+ Level.WARNING,
+ "The GAE edition is unable to return a channel for a representation given its write(WritableByteChannel) method.");
+ return null;
+ }
}
/**
@@ -180,25 +192,40 @@ public static Reader getReader(InputStream stream, CharacterSet characterSet)
*/
public static Reader getReader(final WriterRepresentation representation)
throws IOException {
- final PipedWriter pipedWriter = new PipedWriter();
- final PipedReader pipedReader = new PipedReader(pipedWriter);
- final Application application = Application.getCurrent();
-
- // Gets a thread that will handle the task of continuously
- // writing the representation into the input side of the pipe
- application.getTaskService().execute(new Runnable() {
- public void run() {
- try {
- representation.write(pipedWriter);
- pipedWriter.close();
- } catch (IOException ioe) {
- Context.getCurrentLogger().log(Level.FINE,
- "Error while writing to the piped reader.", ioe);
+ if (Edition.CURRENT != Edition.GAE) {
+ // [ifndef gae]
+
+ final PipedWriter pipedWriter = new PipedWriter();
+ final PipedReader pipedReader = new PipedReader(pipedWriter);
+ final Application application = Application.getCurrent();
+
+ // Gets a thread that will handle the task of continuously
+ // writing the representation into the input side of the pipe
+ application.getTaskService().execute(new Runnable() {
+ public void run() {
+ try {
+ representation.write(pipedWriter);
+ pipedWriter.close();
+ } catch (IOException ioe) {
+ Context
+ .getCurrentLogger()
+ .log(
+ Level.FINE,
+ "Error while writing to the piped reader.",
+ ioe);
+ }
}
- }
- });
+ });
- return pipedReader;
+ return pipedReader;
+ // [enddef]
+ } else {
+ Context
+ .getCurrentLogger()
+ .log(Level.WARNING,
+ "The GAE edition is unable to return a reader for a writer representation.");
+ return null;
+ }
}
/**
|
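On the GAE edition both helpers above now log a warning and return null instead of opening a pipe and handing the write to a background task (which that edition cannot support), so callers are expected to handle the null. A hedged caller-side sketch:

    ReadableByteChannel channel = ByteUtils.getChannel(representation);
    if (channel == null) {
        // GAE edition: no channel can be produced here; fall back to the
        // representation's stream-based API (assumed to be available to the caller).
    }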
30f9f278c3430f9e936f566ee8b3394f86f2b01e
|
elasticsearch
|
Added UNICODE_CHARACTER_CLASS support to Regex flags. This flag is only supported in Java 7 and is ignored if set on a Java 6 JVM. Closes #2895--
|
a
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/main/java/org/elasticsearch/common/regex/Regex.java b/src/main/java/org/elasticsearch/common/regex/Regex.java
index 4a9a772737133..683feeae3c171 100644
--- a/src/main/java/org/elasticsearch/common/regex/Regex.java
+++ b/src/main/java/org/elasticsearch/common/regex/Regex.java
@@ -22,12 +22,20 @@
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.Strings;
+import java.util.Locale;
import java.util.regex.Pattern;
/**
*
*/
public class Regex {
+
+ /**
+ * This Regex / {@link Pattern} flag is supported from Java 7 on.
+ * If set on a Java6 JVM the flag will be ignored.
+ *
+ */
+ public static final int UNICODE_CHARACTER_CLASS = 0x100; // supported in JAVA7
/**
* Is the str a simple match pattern.
@@ -107,22 +115,25 @@ public static int flagsFromString(String flags) {
if (s.isEmpty()) {
continue;
}
- if ("CASE_INSENSITIVE".equalsIgnoreCase(s)) {
+ s = s.toUpperCase(Locale.ROOT);
+ if ("CASE_INSENSITIVE".equals(s)) {
pFlags |= Pattern.CASE_INSENSITIVE;
- } else if ("MULTILINE".equalsIgnoreCase(s)) {
+ } else if ("MULTILINE".equals(s)) {
pFlags |= Pattern.MULTILINE;
- } else if ("DOTALL".equalsIgnoreCase(s)) {
+ } else if ("DOTALL".equals(s)) {
pFlags |= Pattern.DOTALL;
- } else if ("UNICODE_CASE".equalsIgnoreCase(s)) {
+ } else if ("UNICODE_CASE".equals(s)) {
pFlags |= Pattern.UNICODE_CASE;
- } else if ("CANON_EQ".equalsIgnoreCase(s)) {
+ } else if ("CANON_EQ".equals(s)) {
pFlags |= Pattern.CANON_EQ;
- } else if ("UNIX_LINES".equalsIgnoreCase(s)) {
+ } else if ("UNIX_LINES".equals(s)) {
pFlags |= Pattern.UNIX_LINES;
- } else if ("LITERAL".equalsIgnoreCase(s)) {
+ } else if ("LITERAL".equals(s)) {
pFlags |= Pattern.LITERAL;
- } else if ("COMMENTS".equalsIgnoreCase(s)) {
+ } else if ("COMMENTS".equals(s)) {
pFlags |= Pattern.COMMENTS;
+ } else if ("UNICODE_CHAR_CLASS".equals(s)) {
+ pFlags |= UNICODE_CHARACTER_CLASS;
} else {
throw new ElasticSearchIllegalArgumentException("Unknown regex flag [" + s + "]");
}
@@ -155,6 +166,9 @@ public static String flagsToString(int flags) {
}
if ((flags & Pattern.COMMENTS) != 0) {
sb.append("COMMENTS|");
+ }
+ if ((flags & UNICODE_CHARACTER_CLASS) != 0) {
+ sb.append("UNICODE_CHAR_CLASS|");
}
return sb.toString();
}
diff --git a/src/test/java/org/elasticsearch/test/unit/common/regex/RegexTests.java b/src/test/java/org/elasticsearch/test/unit/common/regex/RegexTests.java
new file mode 100644
index 0000000000000..2d1bf6f222afc
--- /dev/null
+++ b/src/test/java/org/elasticsearch/test/unit/common/regex/RegexTests.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to ElasticSearch and Shay Banon under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. ElasticSearch licenses this
+ * file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.test.unit.common.regex;
+
+import java.util.Random;
+import java.util.regex.Pattern;
+
+import org.elasticsearch.common.regex.Regex;
+import org.testng.annotations.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+public class RegexTests {
+
+ @Test
+ public void testFlags() {
+ String[] supportedFlags = new String[] { "CASE_INSENSITIVE", "MULTILINE", "DOTALL", "UNICODE_CASE", "CANON_EQ", "UNIX_LINES",
+ "LITERAL", "COMMENTS", "UNICODE_CHAR_CLASS" };
+ int[] flags = new int[] { Pattern.CASE_INSENSITIVE, Pattern.MULTILINE, Pattern.DOTALL, Pattern.UNICODE_CASE, Pattern.CANON_EQ,
+ Pattern.UNIX_LINES, Pattern.LITERAL, Pattern.COMMENTS, Regex.UNICODE_CHARACTER_CLASS };
+ long seed = System.currentTimeMillis();
+ Random random = new Random(seed);
+ int num = 10 + random.nextInt(100);
+ for (int i = 0; i < num; i++) {
+ int numFlags = random.nextInt(flags.length+1);
+ int current = 0;
+ StringBuilder builder = new StringBuilder();
+ for (int j = 0; j < numFlags; j++) {
+ int index = random.nextInt(flags.length);
+ current |= flags[index];
+ builder.append(supportedFlags[index]);
+ if (j < numFlags-1) {
+ builder.append("|");
+ }
+ }
+ String flagsToString = Regex.flagsToString(current);
+ assertThat(Regex.flagsFromString(builder.toString()), equalTo(current));
+ assertThat(Regex.flagsFromString(builder.toString()), equalTo(Regex.flagsFromString(flagsToString)));
+ Pattern.compile("\\w\\d{1,2}", current); // accepts the flags?
+ }
+ }
+}
\ No newline at end of file
|
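A usage sketch for the new flag token, using the flagsFromString() helper shown above; per the commit message the extra bit only has an effect on Java 7 and is ignored on a Java 6 JVM:

    // Sketch only: parse a flag string containing the new token and compile with it,
    // mirroring what the test above does.
    int flags = Regex.flagsFromString("CASE_INSENSITIVE|UNICODE_CHAR_CLASS");
    Pattern pattern = Pattern.compile("\\w+\\d{1,2}", flags);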
b6aa74ef6393229a9d08ace867cdccdc63a91c64
|
hbase
|
HBASE-8299 ExploringCompactionPolicy can get stuck in rare cases.--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1475966 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreEngine.java
index 5330085ac415..e7784ab2bd3b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreEngine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreEngine.java
@@ -26,10 +26,10 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
+import org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy;
import org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy;
import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
import org.apache.hadoop.hbase.util.ReflectionUtils;
-import org.apache.hadoop.hbase.regionserver.compactions.ExploringCompactionPolicy;
/**
* Default StoreEngine creates the default compactor, policy, and store file manager, or
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 59a463f32e2c..9ea31777753b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -280,6 +280,10 @@ public long getStoreFileTtl() {
public long getMemstoreFlushSize() {
return this.region.memstoreFlushSize;
}
+
+ public long getBlockingFileCount() {
+ return blockingFileCount;
+ }
/* End implementation of StoreConfigInformation */
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreConfigInformation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreConfigInformation.java
index d38d70975724..e7b7774bf877 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreConfigInformation.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreConfigInformation.java
@@ -40,4 +40,9 @@ public interface StoreConfigInformation {
* Gets the cf-specific time-to-live for store files.
*/
public long getStoreFileTtl();
+
+ /**
+ * The number of files required before flushes for this store will be blocked.
+ */
+ public long getBlockingFileCount();
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java
index be9a2128f4b5..d27e0b99d3d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExploringCompactionPolicy.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase.regionserver.compactions;
-
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -29,80 +28,128 @@
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
import org.apache.hadoop.hbase.regionserver.StoreFile;
+/**
+ * Class to pick which files if any to compact together.
+ *
+ * This class will search all possibilities for different and if it gets stuck it will choose
+ * the smallest set of files to compact.
+ */
@InterfaceAudience.Private
public class ExploringCompactionPolicy extends RatioBasedCompactionPolicy {
- public ExploringCompactionPolicy(Configuration conf,
- StoreConfigInformation storeConfigInfo) {
+ /** Computed number of files that are needed to assume compactions are stuck. */
+ private final long filesNeededToForce;
+
+ /**
+ * Constructor for ExploringCompactionPolicy.
+ * @param conf The configuration object
+ * @param storeConfigInfo An object to provide info about the store.
+ */
+ public ExploringCompactionPolicy(final Configuration conf,
+ final StoreConfigInformation storeConfigInfo) {
super(conf, storeConfigInfo);
+ filesNeededToForce = storeConfigInfo.getBlockingFileCount();
}
@Override
- ArrayList<StoreFile> applyCompactionPolicy(ArrayList<StoreFile> candidates,
- boolean mayUseOffPeak) throws IOException {
+ final ArrayList<StoreFile> applyCompactionPolicy(final ArrayList<StoreFile> candidates,
+ final boolean mayUseOffPeak) throws IOException {
// Start off choosing nothing.
List<StoreFile> bestSelection = new ArrayList<StoreFile>(0);
+ List<StoreFile> smallest = new ArrayList<StoreFile>(0);
long bestSize = 0;
+ long smallestSize = Long.MAX_VALUE;
+
+ boolean mightBeStuck = candidates.size() >= filesNeededToForce;
// Consider every starting place.
for (int start = 0; start < candidates.size(); start++) {
// Consider every different sub list permutation in between start and end with min files.
- for(int currentEnd = start + comConf.getMinFilesToCompact() - 1;
+ for (int currentEnd = start + comConf.getMinFilesToCompact() - 1;
currentEnd < candidates.size(); currentEnd++) {
- List<StoreFile> potentialMatchFiles = candidates.subList(start, currentEnd+1);
+ List<StoreFile> potentialMatchFiles = candidates.subList(start, currentEnd + 1);
// Sanity checks
- if (potentialMatchFiles.size() < comConf.getMinFilesToCompact()) continue;
- if (potentialMatchFiles.size() > comConf.getMaxFilesToCompact()) continue;
- if (!filesInRatio(potentialMatchFiles, mayUseOffPeak)) continue;
+ if (potentialMatchFiles.size() < comConf.getMinFilesToCompact()) {
+ continue;
+ }
+ if (potentialMatchFiles.size() > comConf.getMaxFilesToCompact()) {
+ continue;
+ }
// Compute the total size of files that will
// have to be read if this set of files is compacted.
- long size = 0;
+ long size = getTotalStoreSize(potentialMatchFiles);
+
+ // Store the smallest set of files. This stored set of files will be used
+ // if it looks like the algorithm is stuck.
+ if (size < smallestSize) {
+ smallest = potentialMatchFiles;
+ smallestSize = size;
+ }
+
+ if (size >= comConf.getMinCompactSize()
+ && !filesInRatio(potentialMatchFiles, mayUseOffPeak)) {
+ continue;
+ }
- for (StoreFile s:potentialMatchFiles) {
- size += s.getReader().length();
+ if (size > comConf.getMaxCompactSize()) {
+ continue;
}
// Keep if this gets rid of more files. Or the same number of files for less io.
- if (potentialMatchFiles.size() > bestSelection.size() ||
- (potentialMatchFiles.size() == bestSelection.size() && size < bestSize)) {
+ if (potentialMatchFiles.size() > bestSelection.size()
+ || (potentialMatchFiles.size() == bestSelection.size() && size < bestSize)) {
bestSelection = potentialMatchFiles;
bestSize = size;
}
}
}
-
+ if (bestSelection.size() == 0 && mightBeStuck) {
+ return new ArrayList<StoreFile>(smallest);
+ }
return new ArrayList<StoreFile>(bestSelection);
}
/**
- * Check that all files satisfy the r
- * @param files
- * @return
+ * Find the total size of a list of store files.
+ * @param potentialMatchFiles StoreFile list.
+ * @return Sum of StoreFile.getReader().length();
*/
- private boolean filesInRatio(List<StoreFile> files, boolean isOffPeak) {
+ private long getTotalStoreSize(final List<StoreFile> potentialMatchFiles) {
+ long size = 0;
+
+ for (StoreFile s:potentialMatchFiles) {
+ size += s.getReader().length();
+ }
+ return size;
+ }
+
+ /**
+ * Check that all files satisfy the constraint
+ * FileSize(i) <= ( Sum(0,N,FileSize(_)) - FileSize(i) ) * Ratio.
+ *
+ * @param files List of store files to consider as a compaction candidate.
+ * @param isOffPeak should the offPeak compaction ratio be used ?
+ * @return a boolean if these files satisfy the ratio constraints.
+ */
+ private boolean filesInRatio(final List<StoreFile> files, final boolean isOffPeak) {
if (files.size() < 2) {
return true;
}
- double currentRatio = isOffPeak ?
- comConf.getCompactionRatioOffPeak() : comConf.getCompactionRatio();
+ final double currentRatio =
+ isOffPeak ? comConf.getCompactionRatioOffPeak() : comConf.getCompactionRatio();
- long totalFileSize = 0;
- for (int i = 0; i < files.size(); i++) {
- totalFileSize += files.get(i).getReader().length();
- }
- for (int i = 0; i < files.size(); i++) {
- long singleFileSize = files.get(i).getReader().length();
- long sumAllOtherFilesize = totalFileSize - singleFileSize;
+ long totalFileSize = getTotalStoreSize(files);
- if (( singleFileSize > sumAllOtherFilesize * currentRatio)
- && (sumAllOtherFilesize >= comConf.getMinCompactSize())){
+ for (StoreFile file : files) {
+ long singleFileSize = file.getReader().length();
+ long sumAllOtherFileSizes = totalFileSize - singleFileSize;
+
+ if (singleFileSize > sumAllOtherFileSizes * currentRatio) {
return false;
}
}
-
return true;
-
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
index 78fd9da9c039..c1bc17000dad 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java
@@ -114,6 +114,14 @@ public CompactionRequest selectCompaction(Collection<StoreFile> candidateFiles,
candidateSelection = checkMinFilesCriteria(candidateSelection);
}
candidateSelection = removeExcessFiles(candidateSelection, isUserCompaction, majorCompaction);
+
+ if (candidateSelection.size() == 0
+ && candidateFiles.size() >= storeConfigInfo.getBlockingFileCount()) {
+ candidateSelection = new ArrayList<StoreFile>(candidateFiles);
+ candidateSelection
+ .subList(0, Math.max(0,candidateSelection.size() - comConf.getMinFilesToCompact()))
+ .clear();
+ }
CompactionRequest result = new CompactionRequest(candidateSelection);
result.setOffPeak(!candidateSelection.isEmpty() && !majorCompaction && mayUseOffPeak);
return result;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java
index 20c388bdb6b6..b647ff8a838b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java
@@ -62,7 +62,7 @@ public class TestDefaultCompactSelection extends TestCase {
protected static final int maxFiles = 5;
protected static final long minSize = 10;
- protected static final long maxSize = 1000;
+ protected static final long maxSize = 2100;
private HLog hlog;
private HRegion region;
@@ -269,12 +269,8 @@ public void testCompactionRatio() throws IOException {
// big size + threshold
compactEquals(sfCreate(tooBig, tooBig, 700,700) /* empty */);
// small files = don't care about ratio
- compactEquals(sfCreate(8,3,1), 8,3,1);
- /* TODO: add sorting + unit test back in when HBASE-2856 is fixed
- // sort first so you don't include huge file the tail end.
- // happens with HFileOutputFormat bulk migration
- compactEquals(sfCreate(100,50,23,12,12, 500), 23, 12, 12);
- */
+ compactEquals(sfCreate(7,1,1), 7,1,1);
+
// don't exceed max file compact threshold
// note: file selection starts with largest to smallest.
compactEquals(sfCreate(7, 6, 5, 4, 3, 2, 1), 5, 4, 3, 2, 1);
@@ -285,6 +281,15 @@ public void testCompactionRatio() throws IOException {
compactEquals(sfCreate(251, 253, 251, maxSize -1), 251, 253, 251);
+ compactEquals(sfCreate(maxSize -1,maxSize -1,maxSize -1) /* empty */);
+
+ // Always try and compact something to get below blocking storefile count
+ this.conf.setLong("hbase.hstore.compaction.min.size", 1);
+ store.storeEngine.getCompactionPolicy().setConf(conf);
+ compactEquals(sfCreate(512,256,128,64,32,16,8,4,2,1), 4,2,1);
+ this.conf.setLong("hbase.hstore.compaction.min.size", minSize);
+ store.storeEngine.getCompactionPolicy().setConf(conf);
+
/* MAJOR COMPACTION */
// if a major compaction has been forced, then compact everything
compactEquals(sfCreate(50,25,12,12), true, 50, 25, 12, 12);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ConstantSizeFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ConstantSizeFileListGenerator.java
new file mode 100644
index 000000000000..68d57afd7ca8
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ConstantSizeFileListGenerator.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+
+/**
+ * Class to generate several lists of StoreFiles that are all the same size.
+ */
+class ConstantSizeFileListGenerator extends StoreFileListGenerator {
+
+ /** How many mb's mock storefiles should be. */
+ private static final int FILESIZE = 5;
+
+ ConstantSizeFileListGenerator() {
+ super(ConstantSizeFileListGenerator.class);
+ }
+
+ @Override
+ public final Iterator<List<StoreFile>> iterator() {
+ return new Iterator<List<StoreFile>>() {
+ private int count = 0;
+
+ @Override
+ public boolean hasNext() {
+ return count < MAX_FILE_GEN_ITERS;
+ }
+
+ @Override
+ public List<StoreFile> next() {
+ count += 1;
+ ArrayList<StoreFile> files = new ArrayList<StoreFile>(NUM_FILES_GEN);
+ for (int i = 0; i < NUM_FILES_GEN; i++) {
+ files.add(createMockStoreFile(FILESIZE));
+ }
+ return files;
+ }
+
+ @Override
+ public void remove() {
+
+ }
+ };
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/EverythingPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/EverythingPolicy.java
new file mode 100644
index 000000000000..5265d8abd0bb
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/EverythingPolicy.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+
+/**
+ * Test Policy to compact everything every time.
+ */
+public class EverythingPolicy extends RatioBasedCompactionPolicy {
+ /**
+ * Constructor.
+ *
+ * @param conf The Conf.
+ * @param storeConfigInfo Info about the store.
+ */
+ public EverythingPolicy(final Configuration conf,
+ final StoreConfigInformation storeConfigInfo) {
+ super(conf, storeConfigInfo);
+ }
+
+ @Override
+ final ArrayList<StoreFile> applyCompactionPolicy(final ArrayList<StoreFile> candidates,
+ final boolean mayUseOffPeak) throws IOException {
+
+ if (candidates.size() < comConf.getMinFilesToCompact()) {
+ return new ArrayList<StoreFile>(0);
+ }
+
+ return new ArrayList<StoreFile>(candidates);
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ExplicitFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ExplicitFileListGenerator.java
new file mode 100644
index 000000000000..24302b81b688
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/ExplicitFileListGenerator.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+
+/**
+ * Class to create list of mock storefiles of specified length.
+ * This is great for testing edge cases.
+ */
+class ExplicitFileListGenerator extends StoreFileListGenerator {
+ /** The explicit files size lists to return. */
+ private int[][] fileSizes = new int[][]{
+ {1000, 350, 200, 100, 20, 10, 10},
+ {1000, 450, 200, 100, 20, 10, 10},
+ {1000, 550, 200, 100, 20, 10, 10},
+ {1000, 650, 200, 100, 20, 10, 10},
+ {1, 1, 600, 1, 1, 1, 1},
+ {1, 1, 600, 600, 600, 600, 600, 1, 1, 1, 1},
+ {1, 1, 600, 600, 600, 1, 1, 1, 1},
+ {1000, 250, 25, 25, 25, 25, 25, 25},
+ {25, 25, 25, 25, 25, 25, 500},
+ {1000, 1000, 1000, 1000, 900},
+ {107, 50, 10, 10, 10, 10},
+ {2000, 107, 50, 10, 10, 10, 10},
+ {9, 8, 7, 6, 5, 4, 3, 2, 1},
+ {11, 18, 9, 8, 7, 6, 5, 4, 3, 2, 1},
+ {110, 18, 18, 18, 18, 9, 8, 7, 6, 5, 4, 3, 2, 1},
+ {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15}
+ };
+
+ ExplicitFileListGenerator() {
+ super(ExplicitFileListGenerator.class);
+ }
+
+ @Override
+ public final Iterator<List<StoreFile>> iterator() {
+ return new Iterator<List<StoreFile>>() {
+ private int nextIndex = 0;
+ @Override
+ public boolean hasNext() {
+ return nextIndex < fileSizes.length;
+ }
+
+ @Override
+ public List<StoreFile> next() {
+ List<StoreFile> files = createStoreFileList(fileSizes[nextIndex]);
+ nextIndex += 1;
+ return files;
+ }
+
+ @Override
+ public void remove() {
+ }
+ };
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/GaussianFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/GaussianFileListGenerator.java
new file mode 100644
index 000000000000..a19e9ad8dc87
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/GaussianFileListGenerator.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.commons.math.random.GaussianRandomGenerator;
+import org.apache.commons.math.random.MersenneTwister;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+
+class GaussianFileListGenerator extends StoreFileListGenerator {
+
+ GaussianFileListGenerator() {
+ super(GaussianFileListGenerator.class);
+ }
+
+ @Override
+ public Iterator<List<StoreFile>> iterator() {
+ return new Iterator<List<StoreFile>>() {
+ private GaussianRandomGenerator gen =
+ new GaussianRandomGenerator(new MersenneTwister(random.nextInt()));
+ private int count = 0;
+
+ @Override
+ public boolean hasNext() {
+ return count < MAX_FILE_GEN_ITERS;
+ }
+
+ @Override
+ public List<StoreFile> next() {
+ count += 1;
+ ArrayList<StoreFile> files = new ArrayList<StoreFile>(NUM_FILES_GEN);
+ for (int i = 0; i < NUM_FILES_GEN; i++) {
+ files.add(createMockStoreFile(
+ (int) Math.ceil(Math.max(0, gen.nextNormalizedDouble() * 32 + 32)))
+ );
+ }
+
+ return files;
+ }
+
+ @Override
+ public void remove() {
+ }
+ };
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
new file mode 100644
index 000000000000..076073647ee5
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Random;
+
+import com.google.common.base.Objects;
+import com.google.common.io.Files;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.util.StringUtils;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+/**
+ * Base class of objects that can create mock store files with a given size.
+ */
+class MockStoreFileGenerator {
+ /** How many chars long the store file name will be. */
+ private static final int FILENAME_LENGTH = 10;
+ /** The random number generator. */
+ protected Random random;
+
+ MockStoreFileGenerator(Class klass) {
+ random = new Random(klass.getSimpleName().hashCode());
+ }
+
+ protected List<StoreFile> createStoreFileList(final int[] fs) {
+ List<StoreFile> storeFiles = new LinkedList<StoreFile>();
+ for (int fileSize : fs) {
+ storeFiles.add(createMockStoreFile(fileSize));
+ }
+ return storeFiles;
+ }
+
+ protected StoreFile createMockStoreFile(final long size) {
+ return createMockStoreFile(size * 1024 * 1024, -1L);
+ }
+
+ protected StoreFile createMockStoreFileBytes(final long size) {
+ return createMockStoreFile(size, -1L);
+ }
+
+ protected StoreFile createMockStoreFile(final long sizeInBytes, final long seqId) {
+ StoreFile mockSf = mock(StoreFile.class);
+ StoreFile.Reader reader = mock(StoreFile.Reader.class);
+ String stringPath = "/hbase/testTable/regionA/"
+ + RandomStringUtils.random(FILENAME_LENGTH, 0, 0, true, true, null, random);
+ Path path = new Path(stringPath);
+
+
+ when(reader.getSequenceID()).thenReturn(seqId);
+ when(reader.getTotalUncompressedBytes()).thenReturn(sizeInBytes);
+ when(reader.length()).thenReturn(sizeInBytes);
+
+ when(mockSf.getPath()).thenReturn(path);
+ when(mockSf.excludeFromMinorCompaction()).thenReturn(false);
+ when(mockSf.isReference()).thenReturn(false); // TODO come back to
+ // this when selection takes this into account
+ when(mockSf.getReader()).thenReturn(reader);
+ String toString = Objects.toStringHelper("MockStoreFile")
+ .add("isReference", false)
+ .add("fileSize", StringUtils.humanReadableInt(sizeInBytes))
+ .add("seqId", seqId)
+ .add("path", stringPath).toString();
+ when(mockSf.toString()).thenReturn(toString);
+
+ return mockSf;
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
index e26af1774899..8dc6550c0261 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
@@ -18,14 +18,11 @@
package org.apache.hadoop.hbase.regionserver.compactions;
-import com.google.common.base.Objects;
-import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
import org.apache.hadoop.hbase.regionserver.StoreFile;
@@ -37,130 +34,135 @@
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
-import java.util.LinkedList;
import java.util.List;
-import java.util.Random;
import static org.mockito.Mockito.mock;
+
import static org.mockito.Mockito.when;
-@Category(SmallTests.class)
+@Category(MediumTests.class)
@RunWith(Parameterized.class)
-public class PerfTestCompactionPolicies {
+public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
+
- static final Log LOG = LogFactory.getLog(PerfTestCompactionPolicies.class);
+ private static final Log LOG = LogFactory.getLog(PerfTestCompactionPolicies.class);
private final RatioBasedCompactionPolicy cp;
+ private final StoreFileListGenerator generator;
+ private final HStore store;
+ private Class<? extends StoreFileListGenerator> fileGenClass;
private final int max;
private final int min;
private final float ratio;
private long written = 0;
- private long fileDiff = 0;
- private Random random;
@Parameterized.Parameters
public static Collection<Object[]> data() {
- return Arrays.asList(new Object[][] {
- {RatioBasedCompactionPolicy.class, 3, 2, 1.2f},
- {ExploringCompactionPolicy.class, 3, 2, 1.2f},
- {RatioBasedCompactionPolicy.class, 4, 2, 1.2f},
- {ExploringCompactionPolicy.class, 4, 2, 1.2f},
- {RatioBasedCompactionPolicy.class, 5, 2, 1.2f},
- {ExploringCompactionPolicy.class, 5, 2, 1.2f},
- {RatioBasedCompactionPolicy.class, 4, 2, 1.3f},
- {ExploringCompactionPolicy.class, 4, 2, 1.3f},
- {RatioBasedCompactionPolicy.class, 4, 2, 1.4f},
- {ExploringCompactionPolicy.class, 4, 2, 1.4f},
-
- });
+
+
+
+ Class[] policyClasses = new Class[]{
+ EverythingPolicy.class,
+ RatioBasedCompactionPolicy.class,
+ ExploringCompactionPolicy.class,
+ };
+
+ Class[] fileListGenClasses = new Class[]{
+ ExplicitFileListGenerator.class,
+ ConstantSizeFileListGenerator.class,
+ SemiConstantSizeFileListGenerator.class,
+ GaussianFileListGenerator.class,
+ SinusoidalFileListGenerator.class,
+ SpikyFileListGenerator.class
+ };
+
+ int[] maxFileValues = new int[] {10};
+ int[] minFilesValues = new int[] {3};
+ float[] ratioValues = new float[] {1.2f};
+
+ List<Object[]> params = new ArrayList<Object[]>(
+ maxFileValues.length
+ * minFilesValues.length
+ * fileListGenClasses.length
+ * policyClasses.length);
+
+
+ for (Class policyClass : policyClasses) {
+ for (Class genClass: fileListGenClasses) {
+ for (int maxFile:maxFileValues) {
+ for (int minFile:minFilesValues) {
+ for (float ratio:ratioValues) {
+ params.add(new Object[] {policyClass, genClass, maxFile, minFile, ratio});
+ }
+ }
+ }
+ }
+ }
+
+ return params;
}
/**
- * Test the perf of a CompactionPolicy with settings
- * @param cp The compaction policy to test
- * @param max The maximum number of file to compact
- * @param min The min number of files to compact
- * @param ratio The ratio that files must be under to be compacted.
+ * Test the perf of a CompactionPolicy with settings.
+ * @param cpClass The compaction policy to test
+ * @param inMmax The maximum number of file to compact
+ * @param inMin The min number of files to compact
+ * @param inRatio The ratio that files must be under to be compacted.
*/
- public PerfTestCompactionPolicies(Class<? extends CompactionPolicy> cpClass,
- int max, int min, float ratio) {
- this.max = max;
- this.min = min;
- this.ratio = ratio;
-
- //Hide lots of logging so the sysout is usable as a tab delimited file.
+ public PerfTestCompactionPolicies(
+ final Class<? extends CompactionPolicy> cpClass,
+ final Class<? extends StoreFileListGenerator> fileGenClass,
+ final int inMmax,
+ final int inMin,
+ final float inRatio) throws IllegalAccessException, InstantiationException {
+ super(PerfTestCompactionPolicies.class);
+ this.fileGenClass = fileGenClass;
+ this.max = inMmax;
+ this.min = inMin;
+ this.ratio = inRatio;
+
+ // Hide lots of logging so the system out is usable as a tab delimited file.
org.apache.log4j.Logger.getLogger(CompactionConfiguration.class).
setLevel(org.apache.log4j.Level.ERROR);
+ org.apache.log4j.Logger.getLogger(RatioBasedCompactionPolicy.class).
+ setLevel(org.apache.log4j.Level.ERROR);
org.apache.log4j.Logger.getLogger(cpClass).setLevel(org.apache.log4j.Level.ERROR);
+
Configuration configuration = HBaseConfiguration.create();
- //Make sure that this doesn't include every file.
+ // Make sure that this doesn't include every file.
configuration.setInt("hbase.hstore.compaction.max", max);
configuration.setInt("hbase.hstore.compaction.min", min);
configuration.setFloat("hbase.hstore.compaction.ratio", ratio);
- HStore store = createMockStore();
+ store = createMockStore();
this.cp = ReflectionUtils.instantiateWithCustomCtor(cpClass.getName(),
- new Class[] { Configuration.class, StoreConfigInformation.class },
- new Object[] { configuration, store });
+ new Class[] {Configuration.class, StoreConfigInformation.class },
+ new Object[] {configuration, store });
- //Used for making paths
- random = new Random(42);
+ this.generator = fileGenClass.newInstance();
+ // Used for making paths
}
@Test
- public void testSelection() throws Exception {
- //Some special cases. To simulate bulk loading patterns.
- int[][] fileSizes = new int[][]{
- {1000, 350, 200, 100, 20, 10, 10},
- {1000, 450, 200, 100, 20, 10, 10},
- {1000, 550, 200, 100, 20, 10, 10},
- {1000, 650, 200, 100, 20, 10, 10},
- {1000, 250, 25, 25, 25, 25, 25, 25},
- {25, 25, 25, 25, 25, 25, 500},
- {1000, 1000, 1000, 1000, 900},
- {107, 50, 10, 10, 10, 10},
- {2000, 107, 50, 10, 10, 10, 10},
- {9, 8, 7, 6, 5, 4, 3, 2, 1},
- {11, 18, 9, 8, 7, 6, 5, 4, 3, 2, 1},
- {110, 18, 18, 18, 18, 9, 8, 7, 6, 5, 4, 3, 2, 1},
- {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15}
- };
-
- for (int[] fs : fileSizes) {
- List<StoreFile> storeFiles = createStoreFileList(fs);
- storeFiles = runIteration(storeFiles);
- runIteration(storeFiles);
- }
-
- for (int i = 0; i < 100; i++) {
- List<StoreFile> storeFiles = new LinkedList<StoreFile>();
-
- //Add some files to start with so that things are more normal
- storeFiles.add(createMockStoreFile(random.nextInt(1700) + 500));
- storeFiles.add(createMockStoreFile(random.nextInt(700) + 400));
- storeFiles.add(createMockStoreFile(random.nextInt(400) + 300));
- storeFiles.add(createMockStoreFile(random.nextInt(400) + 200));
-
- for (int x = 0; x < 50; x++) {
- storeFiles.add(createMockStoreFile(random.nextInt(90) + 10));
- storeFiles.add(createMockStoreFile(random.nextInt(90) + 10));
- storeFiles.add(createMockStoreFile(random.nextInt(90) + 10));
- storeFiles.add(createMockStoreFile(random.nextInt(90) + 10));
- storeFiles.add(createMockStoreFile(random.nextInt(90) + 10));
- storeFiles.add(createMockStoreFile(random.nextInt(90) + 10));
- storeFiles = runIteration(storeFiles);
- storeFiles = runIteration(storeFiles);
+ public final void testSelection() throws Exception {
+ long fileDiff = 0;
+ for (List<StoreFile> storeFileList : generator) {
+ List<StoreFile> currentFiles = new ArrayList<StoreFile>(18);
+ for (StoreFile file : storeFileList) {
+ currentFiles.add(file);
+ currentFiles = runIteration(currentFiles);
}
+ fileDiff += (storeFileList.size() - currentFiles.size());
}
- //print out tab delimited so that it can be used in excel/gdocs.
+ // print out tab delimited so that it can be used in excel/gdocs.
System.out.println(
- cp.getClass().getSimpleName()
+ cp.getClass().getSimpleName()
+ + "\t" + fileGenClass.getSimpleName()
+ "\t" + max
+ "\t" + min
+ "\t" + ratio
@@ -175,7 +177,7 @@ private List<StoreFile> runIteration(List<StoreFile> startingStoreFiles) throws
List<StoreFile> storeFiles = new ArrayList<StoreFile>(startingStoreFiles);
CompactionRequest req = cp.selectCompaction(
storeFiles, new ArrayList<StoreFile>(), false, false, false);
- int newFileSize = 0;
+ long newFileSize = 0;
Collection<StoreFile> filesToCompact = req.getFiles();
@@ -188,55 +190,17 @@ private List<StoreFile> runIteration(List<StoreFile> startingStoreFiles) throws
newFileSize += storeFile.getReader().length();
}
- storeFiles.add(createMockStoreFile(newFileSize));
+ storeFiles.add(createMockStoreFileBytes(newFileSize));
}
written += newFileSize;
- fileDiff += storeFiles.size() - startingStoreFiles.size();
return storeFiles;
}
- private List<StoreFile> createStoreFileList(int[] fs) {
- List<StoreFile> storeFiles = new LinkedList<StoreFile>();
- for (int fileSize : fs) {
- storeFiles.add(createMockStoreFile(fileSize));
- }
- return storeFiles;
- }
-
- private StoreFile createMockStoreFile(int sizeMb) {
- return createMockStoreFile(sizeMb, -1l);
- }
-
-
- private StoreFile createMockStoreFile(int sizeMb, long seqId) {
- StoreFile mockSf = mock(StoreFile.class);
- StoreFile.Reader reader = mock(StoreFile.Reader.class);
- String stringPath = "/hbase/" + RandomStringUtils.random(10, 0, 0, true, true, null, random);
- Path path = new Path(stringPath);
-
- when(reader.getSequenceID()).thenReturn(seqId);
- when(reader.getTotalUncompressedBytes()).thenReturn(Long.valueOf(sizeMb));
- when(reader.length()).thenReturn(Long.valueOf(sizeMb));
-
- when(mockSf.getPath()).thenReturn(path);
- when(mockSf.excludeFromMinorCompaction()).thenReturn(false);
- when(mockSf.isReference()).thenReturn(false); // TODO come back to
- // this when selection takes this into account
- when(mockSf.getReader()).thenReturn(reader);
- String toString = Objects.toStringHelper("MockStoreFile")
- .add("isReference", false)
- .add("fileSize", sizeMb)
- .add("seqId", seqId)
- .add("path", stringPath).toString();
- when(mockSf.toString()).thenReturn(toString);
-
- return mockSf;
- }
-
private HStore createMockStore() {
HStore s = mock(HStore.class);
when(s.getStoreFileTtl()).thenReturn(Long.MAX_VALUE);
+ when(s.getBlockingFileCount()).thenReturn(7L);
return s;
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SemiConstantSizeFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SemiConstantSizeFileListGenerator.java
new file mode 100644
index 000000000000..ed4531a3101f
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SemiConstantSizeFileListGenerator.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+
+class SemiConstantSizeFileListGenerator extends StoreFileListGenerator {
+ SemiConstantSizeFileListGenerator() {
+ super(SemiConstantSizeFileListGenerator.class);
+ }
+
+ @Override
+ public Iterator<List<StoreFile>> iterator() {
+ return new Iterator<List<StoreFile>>() {
+ private int count = 0;
+
+ @Override
+ public boolean hasNext() {
+ return count < MAX_FILE_GEN_ITERS;
+ }
+
+ @Override
+ public List<StoreFile> next() {
+ count += 1;
+ ArrayList<StoreFile> files = new ArrayList<StoreFile>(NUM_FILES_GEN);
+ for (int i = 0; i < NUM_FILES_GEN; i++) {
+ files.add(createMockStoreFile(random.nextInt(5) + 30));
+ }
+ return files;
+ }
+
+ @Override
+ public void remove() {
+
+ }
+ };
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SinusoidalFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SinusoidalFileListGenerator.java
new file mode 100644
index 000000000000..6afbb2f2de65
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SinusoidalFileListGenerator.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+
+class SinusoidalFileListGenerator extends StoreFileListGenerator {
+ SinusoidalFileListGenerator() {
+ super(SinusoidalFileListGenerator.class);
+ }
+
+ @Override
+ public Iterator<List<StoreFile>> iterator() {
+
+
+ return new Iterator<List<StoreFile>>() {
+ private int count = 0;
+ @Override
+ public boolean hasNext() {
+ return count < MAX_FILE_GEN_ITERS;
+ }
+
+ @Override
+ public List<StoreFile> next() {
+ count += 1;
+ ArrayList<StoreFile> files = new ArrayList<StoreFile>(NUM_FILES_GEN);
+ for (int x = 0; x < NUM_FILES_GEN; x++) {
+ int fileSize = (int) Math.abs(64 * Math.sin((Math.PI * x) / 50.0)) + 1;
+ files.add(createMockStoreFile(fileSize));
+ }
+ return files;
+ }
+
+ @Override
+ public void remove() {
+ }
+ };
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SpikyFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SpikyFileListGenerator.java
new file mode 100644
index 000000000000..ebaa7115a143
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/SpikyFileListGenerator.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+
+class SpikyFileListGenerator extends StoreFileListGenerator {
+
+ SpikyFileListGenerator() {
+ super(SpikyFileListGenerator.class);
+ }
+
+ @Override
+ public Iterator<List<StoreFile>> iterator() {
+ return new Iterator<List<StoreFile>>() {
+ private int count = 0;
+
+ @Override
+ public boolean hasNext() {
+ return count < (MAX_FILE_GEN_ITERS);
+ }
+
+ @Override
+ public List<StoreFile> next() {
+ count += 1;
+ ArrayList<StoreFile> files = new ArrayList<StoreFile>(NUM_FILES_GEN);
+ for (int x = 0; x < NUM_FILES_GEN; x++) {
+ int fileSize = random.nextInt(5) + 1;
+ if ( x % 10 == 0) {
+ fileSize = random.nextInt(5) + 50;
+ }
+ files.add(createMockStoreFile(fileSize));
+ }
+ return files;
+ }
+
+ @Override
+ public void remove() {
+ }
+ };
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/StoreFileListGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/StoreFileListGenerator.java
new file mode 100644
index 000000000000..643f7714fd27
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/StoreFileListGenerator.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+import java.util.List;
+
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+
+public abstract class StoreFileListGenerator
+ extends MockStoreFileGenerator implements Iterable<List<StoreFile>> {
+
+ public static final int MAX_FILE_GEN_ITERS = 10;
+ public static final int NUM_FILES_GEN = 1000;
+
+ StoreFileListGenerator(final Class klass) {
+ super(klass);
+ }
+}
|
930022aaacd24376b0f17168d5b8c36ca401e626
|
kotlin
|
J2K: correct conversion of nested class references. KT-5294 Fixed. KT-5400 Fixed.
|
c
|
https://github.com/JetBrains/kotlin
|
diff --git a/j2k/src/org/jetbrains/jet/j2k/ConstructorConverter.kt b/j2k/src/org/jetbrains/jet/j2k/ConstructorConverter.kt
index 1aabd44ea4512..bd23239ff6278 100644
--- a/j2k/src/org/jetbrains/jet/j2k/ConstructorConverter.kt
+++ b/j2k/src/org/jetbrains/jet/j2k/ConstructorConverter.kt
@@ -183,8 +183,7 @@ class ConstructorConverter(private val psiClass: PsiClass, private val converter
var body = postProcessBody(bodyConverter.convertBlock(constructor.getBody()))
val containingClass = constructor.getContainingClass()
val typeParameterList = converter.convertTypeParameterList(containingClass?.getTypeParameterList())
- val factoryFunctionType = ClassType(containingClass?.declarationIdentifier() ?: Identifier.Empty,
- typeParameterList.parameters,
+ val factoryFunctionType = ClassType(ReferenceElement(containingClass?.declarationIdentifier() ?: Identifier.Empty, typeParameterList.parameters).assignNoPrototype(),
Nullability.NotNull,
converter.settings).assignNoPrototype()
return FactoryFunction(constructor.declarationIdentifier(), annotations, correctFactoryFunctionAccess(modifiers),
@@ -201,13 +200,14 @@ class ConstructorConverter(private val psiClass: PsiClass, private val converter
val body = primaryConstructor.getBody()
val parameterUsageReplacementMap = HashMap<String, String>()
+ val correctedTypeConverter = converter.withSpecialContext(psiClass).typeConverter /* to correct nested class references */
val block = if (body != null) {
val statementsToRemove = HashSet<PsiStatement>()
for (parameter in params) {
val (field, initializationStatement) = findBackingFieldForConstructorParameter(parameter, primaryConstructor) ?: continue
- val fieldType = typeConverter.convertVariableType(field)
- val parameterType = typeConverter.convertVariableType(parameter)
+ val fieldType = correctedTypeConverter.convertVariableType(field)
+ val parameterType = correctedTypeConverter.convertVariableType(parameter)
// types can be different only in nullability
val `type` = if (fieldType == parameterType) {
fieldType
@@ -244,6 +244,7 @@ class ConstructorConverter(private val psiClass: PsiClass, private val converter
// we need to replace renamed parameter usages in base class constructor arguments and in default values
val correctedConverter = converter.withExpressionVisitor { ReplacingExpressionVisitor(this, parameterUsageReplacementMap, it) }
+ .withSpecialContext(psiClass) /* to correct nested class references */
val statement = primaryConstructor.getBody()?.getStatements()?.firstOrNull()
val methodCall = (statement as? PsiExpressionStatement)?.getExpression() as? PsiMethodCallExpression
@@ -259,7 +260,7 @@ class ConstructorConverter(private val psiClass: PsiClass, private val converter
else
null
if (!parameterToField.containsKey(parameter)) {
- converter.convertParameter(parameter, defaultValue = defaultValue)
+ correctedConverter.convertParameter(parameter, defaultValue = defaultValue)
}
else {
val (field, `type`) = parameterToField[parameter]!!
diff --git a/j2k/src/org/jetbrains/jet/j2k/Converter.kt b/j2k/src/org/jetbrains/jet/j2k/Converter.kt
index 62cfeb92ddeb8..fdab1aa2afed9 100644
--- a/j2k/src/org/jetbrains/jet/j2k/Converter.kt
+++ b/j2k/src/org/jetbrains/jet/j2k/Converter.kt
@@ -24,6 +24,7 @@ import com.intellij.psi.CommonClassNames.*
import org.jetbrains.jet.lang.types.expressions.OperatorConventions.*
import com.intellij.openapi.project.Project
import com.intellij.psi.util.PsiMethodUtil
+import com.intellij.psi.util.PsiTreeUtil
public trait ConversionScope {
public fun contains(element: PsiElement): Boolean
@@ -34,29 +35,53 @@ public class FilesConversionScope(val files: Collection<PsiJavaFile>) : Conversi
}
public class Converter private(val project: Project, val settings: ConverterSettings, val conversionScope: ConversionScope, val state: Converter.State) {
- private class State(val typeConverter: TypeConverter,
- val methodReturnType: PsiType?,
+ private class State(val methodReturnType: PsiType?,
val expressionVisitorFactory: (Converter) -> ExpressionVisitor,
- val statementVisitorFactory: (Converter) -> StatementVisitor)
- val typeConverter: TypeConverter = state.typeConverter
+ val statementVisitorFactory: (Converter) -> StatementVisitor,
+ val specialContext: PsiElement?,
+ val importList: ImportList?,
+ val importsToAdd: MutableCollection<String>?)
+
+ val typeConverter: TypeConverter = TypeConverter(this)
+
val methodReturnType: PsiType? = state.methodReturnType
+ val specialContext: PsiElement? = state.specialContext
+ val importNames: Set<String> = state.importList?.imports?.mapTo(HashSet<String>()) { it.name } ?: setOf()
+ val importsToAdd: MutableCollection<String>? = state.importsToAdd
private val expressionVisitor = state.expressionVisitorFactory(this)
private val statementVisitor = state.statementVisitorFactory(this)
class object {
- public fun create(project: Project, settings: ConverterSettings, conversionScope: ConversionScope): Converter
- = Converter(project, settings, conversionScope, State(TypeConverter(settings, conversionScope), null, { ExpressionVisitor(it) }, { StatementVisitor(it) }))
+ public fun create(project: Project, settings: ConverterSettings, conversionScope: ConversionScope): Converter {
+ val state = State(null, { ExpressionVisitor(it) }, { StatementVisitor(it) }, null, null, null)
+ return Converter(project, settings, conversionScope, state)
+ }
}
fun withMethodReturnType(methodReturnType: PsiType?): Converter
- = Converter(project, settings, conversionScope, State(typeConverter, methodReturnType, state.expressionVisitorFactory, state.statementVisitorFactory))
+ = Converter(project, settings, conversionScope,
+ State(methodReturnType, state.expressionVisitorFactory, state.statementVisitorFactory, state.specialContext, state.importList, state.importsToAdd))
fun withExpressionVisitor(factory: (Converter) -> ExpressionVisitor): Converter
- = Converter(project, settings, conversionScope, State(typeConverter, state.methodReturnType, factory, state.statementVisitorFactory))
+ = Converter(project, settings, conversionScope,
+ State(state.methodReturnType, factory, state.statementVisitorFactory, state.specialContext, state.importList, state.importsToAdd))
fun withStatementVisitor(factory: (Converter) -> StatementVisitor): Converter
- = Converter(project, settings, conversionScope, State(typeConverter, state.methodReturnType, state.expressionVisitorFactory, factory))
+ = Converter(project, settings, conversionScope,
+ State(state.methodReturnType, state.expressionVisitorFactory, factory, state.specialContext, state.importList, state.importsToAdd))
+
+ fun withSpecialContext(context: PsiElement): Converter
+ = Converter(project, settings, conversionScope,
+ State(state.methodReturnType, state.expressionVisitorFactory, state.statementVisitorFactory, context, state.importList, state.importsToAdd))
+
+ private fun withImportList(importList: ImportList): Converter
+ = Converter(project, settings, conversionScope,
+ State(state.methodReturnType, state.expressionVisitorFactory, state.statementVisitorFactory, state.specialContext, importList, state.importsToAdd))
+
+ private fun withImportsToAdd(importsToAdd: MutableCollection<String>): Converter
+ = Converter(project, settings, conversionScope,
+ State(state.methodReturnType, state.expressionVisitorFactory, state.statementVisitorFactory, state.specialContext, state.importList, importsToAdd))
public fun elementToKotlin(element: PsiElement): String {
val converted = convertTopElement(element) ?: return ""
@@ -80,21 +105,22 @@ public class Converter private(val project: Project, val settings: ConverterSett
}
private fun convertFile(javaFile: PsiJavaFile): File {
+ val importsToAdd = LinkedHashSet<String>()
+ var converter = this.withImportsToAdd(importsToAdd)
var convertedChildren = javaFile.getChildren().map {
if (it is PsiImportList) {
val importList = convertImportList(it)
- typeConverter.importList = importList
+ converter = converter.withImportList(importList)
importList
}
else {
- convertTopElement(it)
+ converter.convertTopElement(it)
}
}.filterNotNull()
- typeConverter.importList = null
- if (typeConverter.importsToAdd.isNotEmpty()) {
+ if (importsToAdd.isNotEmpty()) {
val importList = convertedChildren.filterIsInstance(javaClass<ImportList>()).first()
- val newImportList = ImportList(importList.imports + typeConverter.importsToAdd).assignPrototypesFrom(importList)
+ val newImportList = ImportList(importList.imports + importsToAdd.map { Import(it).assignNoPrototype() }).assignPrototypesFrom(importList)
convertedChildren = convertedChildren.map { if (it == importList) newImportList else it }
}
@@ -392,6 +418,7 @@ public class Converter private(val project: Project, val settings: ConverterSett
return expressionVisitor.result.assignPrototype(expression)
}
+ //TODO: drop this method - it has unclear semantics
fun convertElement(element: PsiElement?): Element {
if (element == null) return Element.Empty
@@ -400,6 +427,46 @@ public class Converter private(val project: Project, val settings: ConverterSett
return elementVisitor.result.assignPrototype(element)
}
+ fun convertCodeReferenceElement(element: PsiJavaCodeReferenceElement, hasExternalQualifier: Boolean, typeArgsConverted: List<Element>? = null): ReferenceElement {
+ val typeArgs = typeArgsConverted ?: typeConverter.convertTypes(element.getTypeParameters())
+
+ if (element.isQualified()) {
+ var result = Identifier.toKotlin(element.getReferenceName()!!)
+ var qualifier = element.getQualifier()
+ while (qualifier != null) {
+ val codeRefElement = qualifier as PsiJavaCodeReferenceElement
+ result = Identifier.toKotlin(codeRefElement.getReferenceName()!!) + "." + result
+ qualifier = codeRefElement.getQualifier()
+ }
+ return ReferenceElement(Identifier(result).assignNoPrototype(), typeArgs).assignPrototype(element)
+ }
+ else {
+ if (!hasExternalQualifier) {
+ // references to nested classes may need correction
+ val targetClass = element.resolve() as? PsiClass
+ if (targetClass != null) {
+ val identifier = constructNestedClassReferenceIdentifier(targetClass, specialContext ?: element)
+ if (identifier != null) {
+ return ReferenceElement(identifier, typeArgs).assignPrototype(element)
+ }
+ }
+ }
+
+ return ReferenceElement(Identifier(element.getReferenceName()!!).assignNoPrototype(), typeArgs).assignPrototype(element)
+ }
+ }
+
+ private fun constructNestedClassReferenceIdentifier(psiClass: PsiClass, context: PsiElement): Identifier? {
+ val outerClass = psiClass.getContainingClass()
+ if (outerClass != null
+ && !PsiTreeUtil.isAncestor(outerClass, context, true)
+ && !psiClass.isImported(context.getContainingFile() as PsiJavaFile)) {
+ val qualifier = constructNestedClassReferenceIdentifier(outerClass, context)?.name ?: outerClass.getName()!!
+ return Identifier(Identifier.toKotlin(qualifier) + "." + Identifier.toKotlin(psiClass.getName()!!)).assignNoPrototype()
+ }
+ return null
+ }
+
fun convertTypeElement(element: PsiTypeElement?): TypeElement
= TypeElement(if (element == null) ErrorType().assignNoPrototype() else typeConverter.convertType(element.getType())).assignPrototype(element)
diff --git a/j2k/src/org/jetbrains/jet/j2k/TypeConverter.kt b/j2k/src/org/jetbrains/jet/j2k/TypeConverter.kt
index 683d327f3a4d6..023bad75cdf26 100644
--- a/j2k/src/org/jetbrains/jet/j2k/TypeConverter.kt
+++ b/j2k/src/org/jetbrains/jet/j2k/TypeConverter.kt
@@ -31,27 +31,16 @@ import org.jetbrains.jet.j2k.ast.ErrorType
import com.intellij.codeInsight.NullableNotNullManager
import org.jetbrains.jet.j2k.ast.ArrayType
import org.jetbrains.jet.j2k.ast.ClassType
+import org.jetbrains.jet.j2k.ast.ReferenceElement
import org.jetbrains.jet.j2k.ast.Identifier
-class TypeConverter(val settings: ConverterSettings, val conversionScope: ConversionScope) {
+class TypeConverter(val converter: Converter) {
private val nullabilityCache = HashMap<PsiElement, Nullability>()
- private val classesToImport = HashSet<String>()
-
- public var importList: ImportList? = null
- set(value) {
- $importList = value
- importNames = importList?.imports?.mapTo(HashSet<String>()) { it.name } ?: setOf()
-
- }
- private var importNames: Set<String> = setOf()
-
- public val importsToAdd: Collection<Import>
- get() = classesToImport.map { Import(it).assignNoPrototype() }
public fun convertType(`type`: PsiType?, nullability: Nullability = Nullability.Default): Type {
if (`type` == null) return ErrorType().assignNoPrototype()
- val result = `type`.accept<Type>(TypeVisitor(this, importNames, classesToImport))!!.assignNoPrototype()
+ val result = `type`.accept<Type>(TypeVisitor(converter))!!.assignNoPrototype()
return when (nullability) {
Nullability.NotNull -> result.toNotNullType()
Nullability.Nullable -> result.toNullableType()
@@ -64,9 +53,9 @@ class TypeConverter(val settings: ConverterSettings, val conversionScope: Conver
public fun convertVariableType(variable: PsiVariable): Type {
val result = if (variable.isMainMethodParameter()) {
- ArrayType(ClassType(Identifier("String").assignNoPrototype(), listOf(), Nullability.NotNull, settings).assignNoPrototype(),
+ ArrayType(ClassType(ReferenceElement(Identifier("String").assignNoPrototype(), listOf()).assignNoPrototype(), Nullability.NotNull, converter.settings).assignNoPrototype(),
Nullability.NotNull,
- settings)
+ converter.settings).assignNoPrototype()
}
else {
convertType(variable.getType(), variableNullability(variable))
@@ -124,7 +113,7 @@ class TypeConverter(val settings: ConverterSettings, val conversionScope: Conver
return Nullability.NotNull
}
- if (!conversionScope.contains(variable)) { // do not analyze usages out of our conversion scope
+ if (!converter.conversionScope.contains(variable)) { // do not analyze usages out of our conversion scope
if (variable is PsiParameter) {
// Object.equals corresponds to Any.equals which has nullable parameter:
val scope = variable.getDeclarationScope()
@@ -203,7 +192,7 @@ class TypeConverter(val settings: ConverterSettings, val conversionScope: Conver
return Nullability.Nullable
}
- if (!conversionScope.contains(method)) return nullability // do not analyze body and usages of methods out of our conversion scope
+ if (!converter.conversionScope.contains(method)) return nullability // do not analyze body and usages of methods out of our conversion scope
if (nullability == Nullability.Default) {
method.getBody()?.accept(object: JavaRecursiveElementVisitor() {
diff --git a/j2k/src/org/jetbrains/jet/j2k/Utils.kt b/j2k/src/org/jetbrains/jet/j2k/Utils.kt
index 4d230d79981c3..9f1cdf95918d3 100644
--- a/j2k/src/org/jetbrains/jet/j2k/Utils.kt
+++ b/j2k/src/org/jetbrains/jet/j2k/Utils.kt
@@ -139,3 +139,20 @@ fun PsiMethod.isMainMethod(): Boolean = PsiMethodUtil.isMainMethod(this)
fun <T: Any> List<T>.singleOrNull2(): T? = if (size == 1) this[0] else null
fun <T: Any> Array<T>.singleOrNull2(): T? = if (size == 1) this[0] else null
+
+fun PsiMember.isImported(file: PsiJavaFile): Boolean {
+ if (this is PsiClass) {
+ val fqName = getQualifiedName()
+ val index = fqName?.lastIndexOf('.') ?: -1
+ val parentName = if (index >= 0) fqName!!.substring(0, index) else null
+ return file.getImportList()?.getAllImportStatements()?.any {
+ it.getImportReference()?.getQualifiedName() == (if (it.isOnDemand()) parentName else fqName)
+ } ?: false
+ }
+ else {
+ return getContainingClass() != null && file.getImportList()?.getImportStaticStatements()?.any {
+ it.resolveTargetClass() == getContainingClass() && (it.isOnDemand() || it.getReferenceName() == getName())
+ } ?: false
+ }
+}
+
diff --git a/j2k/src/org/jetbrains/jet/j2k/ast/NewClassExpression.kt b/j2k/src/org/jetbrains/jet/j2k/ast/NewClassExpression.kt
index bf4b77386c827..0633e3b466f1b 100644
--- a/j2k/src/org/jetbrains/jet/j2k/ast/NewClassExpression.kt
+++ b/j2k/src/org/jetbrains/jet/j2k/ast/NewClassExpression.kt
@@ -19,7 +19,7 @@ package org.jetbrains.jet.j2k.ast
import org.jetbrains.jet.j2k.*
class NewClassExpression(
- val name: Element,
+ val name: ReferenceElement?,
val arguments: List<Expression>,
val qualifier: Expression = Expression.Empty,
val anonymousClass: AnonymousClassBody? = null
@@ -34,7 +34,9 @@ class NewClassExpression(
builder.append(qualifier).append(if (qualifier.isNullable) "!!." else ".")
}
- builder.append(name)
+ if (name != null) {
+ builder.append(name)
+ }
if (anonymousClass == null || !anonymousClass.extendsTrait) {
builder.append("(").append(arguments, ", ").append(")")
diff --git a/j2k/src/org/jetbrains/jet/j2k/ast/ReferenceElement.kt b/j2k/src/org/jetbrains/jet/j2k/ast/ReferenceElement.kt
index 9e03f71646f8b..ea8a8f8f8d302 100644
--- a/j2k/src/org/jetbrains/jet/j2k/ast/ReferenceElement.kt
+++ b/j2k/src/org/jetbrains/jet/j2k/ast/ReferenceElement.kt
@@ -18,8 +18,8 @@ package org.jetbrains.jet.j2k.ast
import org.jetbrains.jet.j2k.*
-class ReferenceElement(val reference: Identifier, val types: List<Type>) : Element() {
+class ReferenceElement(val name: Identifier, val typeArgs: List<Element>) : Element() {
override fun generateCode(builder: CodeBuilder) {
- builder.append(reference).append(types, ", ", "<", ">")
+ builder.append(name).append(typeArgs, ", ", "<", ">")
}
}
diff --git a/j2k/src/org/jetbrains/jet/j2k/ast/Types.kt b/j2k/src/org/jetbrains/jet/j2k/ast/Types.kt
index 1e4e6fc71c231..cb58efc14beb9 100644
--- a/j2k/src/org/jetbrains/jet/j2k/ast/Types.kt
+++ b/j2k/src/org/jetbrains/jet/j2k/ast/Types.kt
@@ -78,15 +78,15 @@ class ErrorType : NotNullType() {
}
}
-class ClassType(val name: Identifier, val typeArgs: List<Element>, nullability: Nullability, settings: ConverterSettings)
+class ClassType(val referenceElement: ReferenceElement, nullability: Nullability, settings: ConverterSettings)
: MayBeNullableType(nullability, settings) {
override fun generateCode(builder: CodeBuilder) {
- builder.append(name).append(typeArgs, ", ", "<", ">").append(isNullableStr)
+ builder append referenceElement append isNullableStr
}
- override fun toNotNullType(): Type = ClassType(name, typeArgs, Nullability.NotNull, settings).assignPrototypesFrom(this)
- override fun toNullableType(): Type = ClassType(name, typeArgs, Nullability.Nullable, settings).assignPrototypesFrom(this)
+ override fun toNotNullType(): Type = ClassType(referenceElement, Nullability.NotNull, settings).assignPrototypesFrom(this)
+ override fun toNullableType(): Type = ClassType(referenceElement, Nullability.Nullable, settings).assignPrototypesFrom(this)
}
class ArrayType(val elementType: Type, nullability: Nullability, settings: ConverterSettings)
diff --git a/j2k/src/org/jetbrains/jet/j2k/visitors/ElementVisitor.kt b/j2k/src/org/jetbrains/jet/j2k/visitors/ElementVisitor.kt
index 6f5d0bab88c2b..2e971a8196d2c 100644
--- a/j2k/src/org/jetbrains/jet/j2k/visitors/ElementVisitor.kt
+++ b/j2k/src/org/jetbrains/jet/j2k/visitors/ElementVisitor.kt
@@ -42,23 +42,6 @@ class ElementVisitor(private val converter: Converter) : JavaElementVisitor() {
result = ExpressionList(converter.convertExpressions(list.getExpressions()))
}
- override fun visitReferenceElement(reference: PsiJavaCodeReferenceElement) {
- val types = typeConverter.convertTypes(reference.getTypeParameters())
- if (!reference.isQualified()) {
- result = ReferenceElement(Identifier(reference.getReferenceName()!!).assignNoPrototype(), types)
- }
- else {
- var code = Identifier.toKotlin(reference.getReferenceName()!!)
- var qualifier = reference.getQualifier()
- while (qualifier != null) {
- val p = qualifier as PsiJavaCodeReferenceElement
- code = Identifier.toKotlin(p.getReferenceName()!!) + "." + code
- qualifier = p.getQualifier()
- }
- result = ReferenceElement(Identifier(code).assignNoPrototype(), types)
- }
- }
-
override fun visitTypeElement(`type`: PsiTypeElement) {
result = TypeElement(typeConverter.convertType(`type`.getType()))
}
diff --git a/j2k/src/org/jetbrains/jet/j2k/visitors/ExpressionVisitor.kt b/j2k/src/org/jetbrains/jet/j2k/visitors/ExpressionVisitor.kt
index 22d42e06c31a8..63ccb01eadce6 100644
--- a/j2k/src/org/jetbrains/jet/j2k/visitors/ExpressionVisitor.kt
+++ b/j2k/src/org/jetbrains/jet/j2k/visitors/ExpressionVisitor.kt
@@ -254,19 +254,17 @@ open class ExpressionVisitor(private val converter: Converter) : JavaElementVisi
converter.convertExpressions(expression.getArrayDimensions()))
}
else {
- result = createNewClassExpression(expression)
+ val anonymousClass = expression.getAnonymousClass()
+ val qualifier = expression.getQualifier()
+ val classRef = expression.getClassOrAnonymousClassReference()
+ val classRefConverted = if (classRef != null) converter.convertCodeReferenceElement(classRef, hasExternalQualifier = qualifier != null) else null
+ result = NewClassExpression(classRefConverted,
+ convertArguments(expression),
+ converter.convertExpression(qualifier),
+ if (anonymousClass != null) converter.convertAnonymousClassBody(anonymousClass) else null)
}
}
- private fun createNewClassExpression(expression: PsiNewExpression): Expression {
- val anonymousClass = expression.getAnonymousClass()
- val classReference = expression.getClassOrAnonymousClassReference()
- return NewClassExpression(converter.convertElement(classReference),
- convertArguments(expression),
- converter.convertExpression(expression.getQualifier()),
- if (anonymousClass != null) converter.convertAnonymousClassBody(anonymousClass) else null)
- }
-
override fun visitParenthesizedExpression(expression: PsiParenthesizedExpression) {
result = ParenthesizedExpression(converter.convertExpression(expression.getExpression()))
}
@@ -317,16 +315,18 @@ open class ExpressionVisitor(private val converter: Converter) : JavaElementVisi
}
// add qualification for static members from base classes and also this works for enum constants in switch
+ val context = converter.specialContext ?: expression
if (target is PsiMember
&& target.hasModifierProperty(PsiModifier.STATIC)
&& target.getContainingClass() != null
- && !PsiTreeUtil.isAncestor(target.getContainingClass(), expression, true)
- && !isStaticallyImported(target, expression)) {
+ && !PsiTreeUtil.isAncestor(target.getContainingClass(), context, true)
+ && !target.isImported(context.getContainingFile() as PsiJavaFile)) {
var member: PsiMember = target
var code = Identifier.toKotlin(referenceName)
- while (member.getContainingClass() != null) {
- code = Identifier.toKotlin(member.getContainingClass()!!.getName()!!) + "." + code
- member = member.getContainingClass()!!
+ while (true) {
+ val containingClass = member.getContainingClass() ?: break
+ code = Identifier.toKotlin(containingClass.getName()!!) + "." + code
+ member = containingClass
}
result = Identifier(code, false, false)
return
@@ -433,23 +433,4 @@ open class ExpressionVisitor(private val converter: Converter) : JavaElementVisi
}
return ""
}
-
- private fun isStaticallyImported(member: PsiMember, context: PsiElement): Boolean {
- val containingFile = context.getContainingFile()
- val targetContainingClass = member.getContainingClass()
- if (containingFile is PsiJavaFile && targetContainingClass != null) {
- val importList = containingFile.getImportList();
- if (importList != null) {
- return importList.getImportStaticStatements().any { importResolvesTo(it, member) }
- }
- }
- return false
- }
-
- private fun importResolvesTo(importStatement: PsiImportStaticStatement, member: PsiMember): Boolean {
- val targetContainingClass = member.getContainingClass()
- val importedClass = importStatement.resolveTargetClass()
- return importedClass == targetContainingClass
- && (importStatement.isOnDemand() || importStatement.getReferenceName() == member.getName())
- }
}
diff --git a/j2k/src/org/jetbrains/jet/j2k/visitors/TypeVisitor.kt b/j2k/src/org/jetbrains/jet/j2k/visitors/TypeVisitor.kt
index c11ea5193d3e7..2db88fd1b88e4 100644
--- a/j2k/src/org/jetbrains/jet/j2k/visitors/TypeVisitor.kt
+++ b/j2k/src/org/jetbrains/jet/j2k/visitors/TypeVisitor.kt
@@ -19,15 +19,19 @@ package org.jetbrains.jet.j2k.visitors
import com.intellij.psi.*
import com.intellij.psi.impl.source.PsiClassReferenceType
import org.jetbrains.jet.j2k.ast.*
-import java.util.LinkedList
import com.intellij.openapi.util.text.StringUtil
-import java.util.ArrayList
import org.jetbrains.jet.lang.resolve.java.JvmPrimitiveType
import org.jetbrains.jet.j2k.TypeConverter
+import java.util.ArrayList
+import org.jetbrains.jet.j2k.singleOrNull2
+import org.jetbrains.jet.j2k.Converter
private val PRIMITIVE_TYPES_NAMES = JvmPrimitiveType.values().map { it.getName() }
-class TypeVisitor(private val converter: TypeConverter, private val importNames: Set<String>, private val classesToImport: MutableSet<String>) : PsiTypeVisitor<Type>() {
+class TypeVisitor(private val converter: Converter) : PsiTypeVisitor<Type>() {
+
+ private val typeConverter: TypeConverter = converter.typeConverter
+
override fun visitPrimitiveType(primitiveType: PsiPrimitiveType): Type {
val name = primitiveType.getCanonicalText()
return if (name == "void") {
@@ -45,100 +49,86 @@ class TypeVisitor(private val converter: TypeConverter, private val importNames:
}
override fun visitArrayType(arrayType: PsiArrayType): Type {
- return ArrayType(converter.convertType(arrayType.getComponentType()), Nullability.Default, converter.settings)
+ return ArrayType(typeConverter.convertType(arrayType.getComponentType()), Nullability.Default, converter.settings)
}
override fun visitClassType(classType: PsiClassType): Type {
- val identifier = constructClassTypeIdentifier(classType)
- val resolvedClassTypeParams = createRawTypesForResolvedReference(classType)
- if (classType.getParameterCount() == 0 && resolvedClassTypeParams.size() > 0) {
- val starParamList = ArrayList<Type>()
- if (resolvedClassTypeParams.size() == 1) {
- if ((resolvedClassTypeParams.single() as ClassType).name.name == "Any") {
- starParamList.add(StarProjectionType())
- return ClassType(identifier, starParamList, Nullability.Default, converter.settings)
- }
- else {
- return ClassType(identifier, resolvedClassTypeParams, Nullability.Default, converter.settings)
- }
- }
- else {
- return ClassType(identifier, resolvedClassTypeParams, Nullability.Default, converter.settings)
- }
- }
- else {
- return ClassType(identifier, converter.convertTypes(classType.getParameters()), Nullability.Default, converter.settings)
- }
+ val refElement = constructReferenceElement(classType)
+ return ClassType(refElement, Nullability.Default, converter.settings)
}
- private fun constructClassTypeIdentifier(classType: PsiClassType): Identifier {
+ private fun constructReferenceElement(classType: PsiClassType): ReferenceElement {
+ val typeArgs = convertTypeArgs(classType)
+
val psiClass = classType.resolve()
if (psiClass != null) {
val javaClassName = psiClass.getQualifiedName()
val kotlinClassName = toKotlinTypesMap[javaClassName]
if (kotlinClassName != null) {
val kotlinShortName = getShortName(kotlinClassName)
- if (kotlinShortName == getShortName(javaClassName!!) && importNames.contains(getPackageName(javaClassName) + ".*")) {
- classesToImport.add(kotlinClassName)
+ if (kotlinShortName == getShortName(javaClassName!!) && converter.importNames.contains(getPackageName(javaClassName) + ".*")) {
+ converter.importsToAdd?.add(kotlinClassName)
}
- return Identifier(kotlinShortName).assignNoPrototype()
+ return ReferenceElement(Identifier(kotlinShortName).assignNoPrototype(), typeArgs).assignNoPrototype()
}
}
if (classType is PsiClassReferenceType) {
- val reference = classType.getReference()
- if (reference.isQualified()) {
- var result = Identifier.toKotlin(reference.getReferenceName()!!)
- var qualifier = reference.getQualifier()
- while (qualifier != null) {
- val codeRefElement = qualifier as PsiJavaCodeReferenceElement
- result = Identifier.toKotlin(codeRefElement.getReferenceName()!!) + "." + result
- qualifier = codeRefElement.getQualifier()
- }
- return Identifier(result).assignNoPrototype()
- }
+ return converter.convertCodeReferenceElement(classType.getReference(), hasExternalQualifier = false, typeArgsConverted = typeArgs)
}
- return Identifier(classType.getClassName() ?: "").assignNoPrototype()
+ return ReferenceElement(Identifier(classType.getClassName() ?: "").assignNoPrototype(), typeArgs).assignNoPrototype()
}
private fun getPackageName(className: String): String = className.substring(0, className.lastIndexOf('.'))
private fun getShortName(className: String): String = className.substring(className.lastIndexOf('.') + 1)
+ private fun convertTypeArgs(classType: PsiClassType): List<Type> {
+ val resolvedClassTypeParams = createRawTypesForResolvedReference(classType)
+
+ if (classType.getParameterCount() == 0 && resolvedClassTypeParams.size() > 0) {
+ if ((resolvedClassTypeParams.singleOrNull2() as? ClassType)?.referenceElement?.name?.name == "Any") {
+ return listOf(StarProjectionType().assignNoPrototype())
+ }
+ else {
+ return resolvedClassTypeParams
+ }
+ }
+ else {
+ return typeConverter.convertTypes(classType.getParameters())
+ }
+ }
+
private fun createRawTypesForResolvedReference(classType: PsiClassType): List<Type> {
- val typeParams = LinkedList<Type>()
+ val typeArgs = ArrayList<Type>()
if (classType is PsiClassReferenceType) {
val resolve = classType.getReference().resolve()
if (resolve is PsiClass) {
for (typeParam in resolve.getTypeParameters()) {
val superTypes = typeParam.getSuperTypes()
val boundType = if (superTypes.size > 0) {
- ClassType(constructClassTypeIdentifier(superTypes[0]),
- converter.convertTypes(superTypes[0].getParameters()),
- Nullability.Default,
- converter.settings)
+ ClassType(constructReferenceElement(superTypes.first()), Nullability.Default, converter.settings)
}
else {
StarProjectionType()
}
- typeParams.add(boundType)
+ typeArgs.add(boundType)
}
}
}
-
- return typeParams
+ return typeArgs
}
override fun visitWildcardType(wildcardType: PsiWildcardType): Type {
return when {
- wildcardType.isExtends() -> OutProjectionType(converter.convertType(wildcardType.getExtendsBound()))
- wildcardType.isSuper() -> InProjectionType(converter.convertType(wildcardType.getSuperBound()))
+ wildcardType.isExtends() -> OutProjectionType(typeConverter.convertType(wildcardType.getExtendsBound()))
+ wildcardType.isSuper() -> InProjectionType(typeConverter.convertType(wildcardType.getSuperBound()))
else -> StarProjectionType()
}
}
override fun visitEllipsisType(ellipsisType: PsiEllipsisType): Type {
- return VarArgType(converter.convertType(ellipsisType.getComponentType()))
+ return VarArgType(typeConverter.convertType(ellipsisType.getComponentType()))
}
class object {
diff --git a/j2k/tests/test/org/jetbrains/jet/j2k/test/JavaToKotlinConverterTestGenerated.java b/j2k/tests/test/org/jetbrains/jet/j2k/test/JavaToKotlinConverterTestGenerated.java
index 583bce28b59c2..5c7acbdb32578 100644
--- a/j2k/tests/test/org/jetbrains/jet/j2k/test/JavaToKotlinConverterTestGenerated.java
+++ b/j2k/tests/test/org/jetbrains/jet/j2k/test/JavaToKotlinConverterTestGenerated.java
@@ -891,6 +891,31 @@ public void testMethodCallInFactoryFun() throws Exception {
doTest("j2k/tests/testData/ast/constructors/methodCallInFactoryFun.java");
}
+ @TestMetadata("nestedClassNameInParameterDefaults.java")
+ public void testNestedClassNameInParameterDefaults() throws Exception {
+ doTest("j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults.java");
+ }
+
+ @TestMetadata("nestedClassNameInParameterDefaults2.java")
+ public void testNestedClassNameInParameterDefaults2() throws Exception {
+ doTest("j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults2.java");
+ }
+
+ @TestMetadata("nestedClassNameInParameterDefaults3.java")
+ public void testNestedClassNameInParameterDefaults3() throws Exception {
+ doTest("j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults3.java");
+ }
+
+ @TestMetadata("nestedClassNameInParameterDefaults4.java")
+ public void testNestedClassNameInParameterDefaults4() throws Exception {
+ doTest("j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults4.java");
+ }
+
+ @TestMetadata("nestedClassNameInSuperParameters.java")
+ public void testNestedClassNameInSuperParameters() throws Exception {
+ doTest("j2k/tests/testData/ast/constructors/nestedClassNameInSuperParameters.java");
+ }
+
@TestMetadata("noPrimary.java")
public void testNoPrimary() throws Exception {
doTest("j2k/tests/testData/ast/constructors/noPrimary.java");
@@ -1732,6 +1757,16 @@ public void testKt_1074() throws Exception {
doTest("j2k/tests/testData/ast/issues/kt-1074.java");
}
+ @TestMetadata("kt-5294.java")
+ public void testKt_5294() throws Exception {
+ doTest("j2k/tests/testData/ast/issues/kt-5294.java");
+ }
+
+ @TestMetadata("kt-5400.java")
+ public void testKt_5400() throws Exception {
+ doTest("j2k/tests/testData/ast/issues/kt-5400.java");
+ }
+
@TestMetadata("kt-543.java")
public void testKt_543() throws Exception {
doTest("j2k/tests/testData/ast/issues/kt-543.java");
diff --git a/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults.java b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults.java
new file mode 100644
index 0000000000000..7a3c0871328f9
--- /dev/null
+++ b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults.java
@@ -0,0 +1,14 @@
+class A {
+ A(Nested nested) {
+ }
+
+ A() {
+ this(new Nested(Nested.FIELD));
+ }
+
+ static class Nested {
+ Nested(int p){}
+
+ public static final int FIELD = 0;
+ }
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults.kt b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults.kt
new file mode 100644
index 0000000000000..2b1329fa6e2f0
--- /dev/null
+++ b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults.kt
@@ -0,0 +1,9 @@
+class A(nested: A.Nested = A.Nested(A.Nested.FIELD)) {
+
+ class Nested(p: Int) {
+ class object {
+
+ public val FIELD: Int = 0
+ }
+ }
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults2.java b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults2.java
new file mode 100644
index 0000000000000..bc5ec29ece16e
--- /dev/null
+++ b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults2.java
@@ -0,0 +1,20 @@
+import A.Nested;
+
+class A {
+ A(Nested nested) {
+ }
+
+ A() {
+ this(new Nested(Nested.FIELD));
+ }
+
+ static class Nested {
+ Nested(int p){}
+
+ public static final int FIELD = 0;
+ }
+}
+
+class B {
+ Nested nested;
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults2.kt b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults2.kt
new file mode 100644
index 0000000000000..9e82b2b95da77
--- /dev/null
+++ b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults2.kt
@@ -0,0 +1,15 @@
+import A.Nested
+
+class A(nested: Nested = Nested(Nested.FIELD)) {
+
+ class Nested(p: Int) {
+ class object {
+
+ public val FIELD: Int = 0
+ }
+ }
+}
+
+class B {
+ var nested: Nested
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults3.java b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults3.java
new file mode 100644
index 0000000000000..7ead1b36cbde4
--- /dev/null
+++ b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults3.java
@@ -0,0 +1,22 @@
+package pack;
+
+import static pack.A.Nested;
+
+class A {
+ A(Nested nested) {
+ }
+
+ A() {
+ this(new Nested(Nested.FIELD));
+ }
+
+ static class Nested {
+ Nested(int p){}
+
+ public static final int FIELD = 0;
+ }
+}
+
+class B {
+ Nested nested;
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults3.kt b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults3.kt
new file mode 100644
index 0000000000000..2cbf12aa27691
--- /dev/null
+++ b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults3.kt
@@ -0,0 +1,17 @@
+package pack
+
+import pack.A.Nested
+
+class A(nested: Nested = Nested(Nested.FIELD)) {
+
+ class Nested(p: Int) {
+ class object {
+
+ public val FIELD: Int = 0
+ }
+ }
+}
+
+class B {
+ var nested: Nested
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults4.java b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults4.java
new file mode 100644
index 0000000000000..13c194022f6b2
--- /dev/null
+++ b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults4.java
@@ -0,0 +1,22 @@
+package pack;
+
+import static pack.A.*;
+
+class A {
+ A(Nested nested) {
+ }
+
+ A() {
+ this(new Nested(Nested.FIELD));
+ }
+
+ static class Nested {
+ Nested(int p){}
+
+ public static final int FIELD = 0;
+ }
+}
+
+class B {
+ Nested nested;
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults4.kt b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults4.kt
new file mode 100644
index 0000000000000..9fadc53f7993f
--- /dev/null
+++ b/j2k/tests/testData/ast/constructors/nestedClassNameInParameterDefaults4.kt
@@ -0,0 +1,17 @@
+package pack
+
+import pack.A.*
+
+class A(nested: Nested = Nested(Nested.FIELD)) {
+
+ class Nested(p: Int) {
+ class object {
+
+ public val FIELD: Int = 0
+ }
+ }
+}
+
+class B {
+ var nested: Nested
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/constructors/nestedClassNameInSuperParameters.java b/j2k/tests/testData/ast/constructors/nestedClassNameInSuperParameters.java
new file mode 100644
index 0000000000000..41d02ece96f5b
--- /dev/null
+++ b/j2k/tests/testData/ast/constructors/nestedClassNameInSuperParameters.java
@@ -0,0 +1,15 @@
+class Base {
+ Base(Nested nested){}
+
+ static class Nested {
+ Nested(int p){}
+
+ public static final int FIELD = 0;
+ }
+}
+
+class Derived extends Base {
+ Derived() {
+ super(new Nested(Nested.FIELD));
+ }
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/constructors/nestedClassNameInSuperParameters.kt b/j2k/tests/testData/ast/constructors/nestedClassNameInSuperParameters.kt
new file mode 100644
index 0000000000000..00bb71e559e81
--- /dev/null
+++ b/j2k/tests/testData/ast/constructors/nestedClassNameInSuperParameters.kt
@@ -0,0 +1,11 @@
+class Base(nested: Base.Nested) {
+
+ class Nested(p: Int) {
+ class object {
+
+ public val FIELD: Int = 0
+ }
+ }
+}
+
+class Derived : Base(Base.Nested(Base.Nested.FIELD))
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/issues/kt-5294.java b/j2k/tests/testData/ast/issues/kt-5294.java
new file mode 100644
index 0000000000000..cdb89eaf4c39c
--- /dev/null
+++ b/j2k/tests/testData/ast/issues/kt-5294.java
@@ -0,0 +1,11 @@
+import java.util.List;
+
+class X {
+ private final List<Y> list;
+
+ X(List<Y> list) {
+ this.list = list;
+ }
+
+ class Y{}
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/issues/kt-5294.kt b/j2k/tests/testData/ast/issues/kt-5294.kt
new file mode 100644
index 0000000000000..afbba72669bce
--- /dev/null
+++ b/j2k/tests/testData/ast/issues/kt-5294.kt
@@ -0,0 +1,4 @@
+class X(private val list: List<X.Y>) {
+
+ inner class Y
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/issues/kt-5400.java b/j2k/tests/testData/ast/issues/kt-5400.java
new file mode 100644
index 0000000000000..c4c379a5f163e
--- /dev/null
+++ b/j2k/tests/testData/ast/issues/kt-5400.java
@@ -0,0 +1,7 @@
+class Base {
+ class Nested{}
+}
+
+class Derived extends Base {
+ Nested field;
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/issues/kt-5400.kt b/j2k/tests/testData/ast/issues/kt-5400.kt
new file mode 100644
index 0000000000000..ca4e330311275
--- /dev/null
+++ b/j2k/tests/testData/ast/issues/kt-5400.kt
@@ -0,0 +1,7 @@
+class Base {
+ inner class Nested
+}
+
+class Derived : Base() {
+ var field: Base.Nested
+}
\ No newline at end of file
diff --git a/j2k/tests/testData/ast/toKotlinClasses/TypeParameterBound.kt b/j2k/tests/testData/ast/toKotlinClasses/TypeParameterBound.kt
index 89db7b8c731fc..15cd57f1d3c8d 100644
--- a/j2k/tests/testData/ast/toKotlinClasses/TypeParameterBound.kt
+++ b/j2k/tests/testData/ast/toKotlinClasses/TypeParameterBound.kt
@@ -1,6 +1,6 @@
import java.util.*
-import kotlin.List
import kotlin.Iterator
+import kotlin.List
trait I<T : List<Iterator<String>>>
|
2ed3dbbf674af24f8c8a694fad468377c705918a
|
elasticsearch
|
Test deleting the percolate type differently.- Instead of checking the types exist api, register a DocumentTypeListener that- notifies when percolate queries have been cleared.--
|
p
|
https://github.com/elastic/elasticsearch
|
diff --git a/src/test/java/org/elasticsearch/test/integration/AbstractNodesTests.java b/src/test/java/org/elasticsearch/test/integration/AbstractNodesTests.java
index 99d361408d4e4..6da2f86bdb5a6 100644
--- a/src/test/java/org/elasticsearch/test/integration/AbstractNodesTests.java
+++ b/src/test/java/org/elasticsearch/test/integration/AbstractNodesTests.java
@@ -33,11 +33,13 @@
import org.junit.BeforeClass;
import org.junit.Ignore;
+import java.util.ArrayList;
+import java.util.List;
import java.util.Map;
import static com.google.common.collect.Maps.newHashMap;
-import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
+import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
@Ignore
@@ -123,6 +125,12 @@ public void closeNode(String id) {
}
+ public List<Node> nodes() {
+ synchronized (AbstractNodesTests.class) {
+ return new ArrayList<Node>(nodes.values());
+ }
+ }
+
public Node node(String id) {
synchronized (AbstractNodesTests.class) {
return nodes.get(id);
diff --git a/src/test/java/org/elasticsearch/test/integration/percolator/DeletePercolatorTypeTests.java b/src/test/java/org/elasticsearch/test/integration/percolator/DeletePercolatorTypeTests.java
new file mode 100644
index 0000000000000..f96815ba8b5ed
--- /dev/null
+++ b/src/test/java/org/elasticsearch/test/integration/percolator/DeletePercolatorTypeTests.java
@@ -0,0 +1,111 @@
+package org.elasticsearch.test.integration.percolator;
+
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
+import org.elasticsearch.action.percolate.PercolateResponse;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.common.Priority;
+import org.elasticsearch.index.mapper.DocumentTypeListener;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.node.Node;
+import org.elasticsearch.node.internal.InternalNode;
+import org.elasticsearch.test.integration.AbstractNodesTests;
+import org.junit.Test;
+
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+
+import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder;
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ */
+public class DeletePercolatorTypeTests extends AbstractNodesTests {
+
+ public void beforeClass() throws Exception {
+ startNode("node1");
+ startNode("node2");
+ }
+
+ @Test
+ public void testDeletePercolatorType() throws Exception {
+ DeleteIndexResponse deleteIndexResponse = client().admin().indices().prepareDelete().execute().actionGet();
+ assertThat("Delete Index failed - not acked", deleteIndexResponse.isAcknowledged(), equalTo(true));
+ ensureGreen();
+
+ client().admin().indices().prepareCreate("test1").execute().actionGet();
+ client().admin().indices().prepareCreate("test2").execute().actionGet();
+ ensureGreen();
+
+ client().prepareIndex("test1", "_percolator", "1")
+ .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject())
+ .execute().actionGet();
+ client().prepareIndex("test2", "_percolator", "1")
+ .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject())
+ .execute().actionGet();
+
+ PercolateResponse response = client().preparePercolate()
+ .setIndices("test1", "test2").setDocumentType("type").setOnlyCount(true)
+ .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))
+ .execute().actionGet();
+ assertThat(response.getCount(), equalTo(2l));
+
+ CountDownLatch test1Latch = createCountDownLatch("test1");
+ CountDownLatch test2Latch =createCountDownLatch("test2");
+
+ client().admin().indices().prepareDeleteMapping("test1").setType("_percolator").execute().actionGet();
+ test1Latch.await();
+
+ response = client().preparePercolate()
+ .setIndices("test1", "test2").setDocumentType("type").setOnlyCount(true)
+ .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))
+ .execute().actionGet();
+ assertNoFailures(response);
+ assertThat(response.getCount(), equalTo(1l));
+
+ client().admin().indices().prepareDeleteMapping("test2").setType("_percolator").execute().actionGet();
+ test2Latch.await();
+
+ // Percolate api should return 0 matches, because all _percolate types have been removed.
+ response = client().preparePercolate()
+ .setIndices("test1", "test2").setDocumentType("type").setOnlyCount(true)
+ .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))
+ .execute().actionGet();
+ assertNoFailures(response);
+ assertThat(response.getCount(), equalTo(0l));
+ }
+
+ private CountDownLatch createCountDownLatch(String index) {
+ List<Node> nodes = nodes();
+ final CountDownLatch latch = new CountDownLatch(nodes.size());
+ for (Node node : nodes) {
+ IndicesService indexServices = ((InternalNode) node).injector().getInstance(IndicesService.class);
+ MapperService mapperService = indexServices.indexService(index).mapperService();
+ mapperService.addTypeListener(new DocumentTypeListener() {
+ @Override
+ public void created(String type) {
+ }
+
+ @Override
+ public void removed(String type) {
+ latch.countDown();
+ }
+ });
+ }
+ return latch;
+ }
+
+ public ClusterHealthStatus ensureGreen() {
+ ClusterHealthResponse actionGet = client().admin().cluster()
+ .health(Requests.clusterHealthRequest().waitForGreenStatus().waitForEvents(Priority.LANGUID).waitForRelocatingShards(0)).actionGet();
+ assertThat(actionGet.isTimedOut(), equalTo(false));
+ assertThat(actionGet.getStatus(), equalTo(ClusterHealthStatus.GREEN));
+ return actionGet.getStatus();
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/test/integration/percolator/SimplePercolatorTests.java b/src/test/java/org/elasticsearch/test/integration/percolator/SimplePercolatorTests.java
index 9d9177c3d913e..0d69879c0be6c 100644
--- a/src/test/java/org/elasticsearch/test/integration/percolator/SimplePercolatorTests.java
+++ b/src/test/java/org/elasticsearch/test/integration/percolator/SimplePercolatorTests.java
@@ -22,14 +22,12 @@
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
-import org.elasticsearch.action.admin.indices.exists.types.TypesExistsResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.action.percolate.PercolateResponse;
import org.elasticsearch.action.percolate.PercolateSourceBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IgnoreIndices;
-import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.ImmutableSettings;
@@ -1028,59 +1026,6 @@ public void testCountPercolatingExistingDocs() throws Exception {
assertThat(response.getMatches(), emptyArray());
}
- @Test
- public void testDeletePercolatorType() throws Exception {
- client().admin().indices().prepareCreate("test1").execute().actionGet();
- client().admin().indices().prepareCreate("test2").execute().actionGet();
- ensureGreen();
-
- client().prepareIndex("test1", "_percolator", "1")
- .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject())
- .execute().actionGet();
- client().prepareIndex("test2", "_percolator", "1")
- .setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject())
- .execute().actionGet();
-
- PercolateResponse response = client().preparePercolate()
- .setIndices("test1", "test2").setDocumentType("type").setOnlyCount(true)
- .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))
- .execute().actionGet();
- assertThat(response.getCount(), equalTo(2l));
-
- client().admin().indices().prepareDeleteMapping("test1").setType("_percolator").execute().actionGet();
- percolatorTypeRemoved("test1");
- response = client().preparePercolate()
- .setIndices("test1", "test2").setDocumentType("type").setOnlyCount(true)
- .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))
- .execute().actionGet();
- assertNoFailures(response);
- assertThat(response.getCount(), equalTo(1l));
-
- client().admin().indices().prepareDeleteMapping("test2").setType("_percolator").execute().actionGet();
- percolatorTypeRemoved("test2");
-
- // Percolate api should return 0 matches, because all _percolate types have been removed.
- response = client().preparePercolate()
- .setIndices("test1", "test2").setDocumentType("type").setOnlyCount(true)
- .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "b").endObject()))
- .execute().actionGet();
- assertNoFailures(response);
- assertThat(response.getCount(), equalTo(0l));
- }
-
- private void percolatorTypeRemoved(String index) {
- my_goto: while (true) {
- for (Client client : clients()) {
- TypesExistsResponse existsResponse =
- client.admin().indices().prepareTypesExists(index).setTypes("_percolator").execute().actionGet();
- if (existsResponse.isExists()) {
- continue my_goto;
- }
- }
- break;
- }
- }
-
public void testPercolateSizingWithQueryAndFilter() throws Exception {
client().admin().indices().prepareCreate("test").execute().actionGet();
ensureGreen();
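The new test above waits for the percolator mapping to disappear on every node by registering a DocumentTypeListener per node and counting down a shared latch, instead of polling in a loop. Below is a minimal, self-contained Java sketch of that latch-per-node pattern; the TypeRegistry and TypeListener interfaces are illustrative stand-ins, not the Elasticsearch API.

import java.util.List;
import java.util.concurrent.CountDownLatch;

class RemovalWaiterSketch {
    // Illustrative stand-ins for the per-node mapper service and its listener.
    interface TypeListener { void removed(String type); }
    interface TypeRegistry { void addListener(TypeListener listener); }

    // One latch count per node; each node's listener counts down once when it
    // observes the type removal, so await() returns only after all nodes agree.
    static CountDownLatch awaitRemoval(List<TypeRegistry> perNodeRegistries) {
        final CountDownLatch latch = new CountDownLatch(perNodeRegistries.size());
        for (TypeRegistry registry : perNodeRegistries) {
            registry.addListener(new TypeListener() {
                @Override
                public void removed(String type) {
                    latch.countDown();
                }
            });
        }
        return latch; // caller issues the delete-mapping request, then calls latch.await()
    }
}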
|
6ebd8e3f2a61591f5d017eb43389efef95ec26a4
|
hbase
|
HBASE-8355: BaseRegionObserver pre(Compact|Flush|Store)ScannerOpen returns null (Andrew Purtell, Jesse Yates). git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1483094 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
index 6240baf528fc..9ea9ffda2a48 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
@@ -83,7 +83,7 @@ public void postClose(ObserverContext<RegionCoprocessorEnvironment> e,
public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
final Store store, final KeyValueScanner memstoreScanner, final InternalScanner s)
throws IOException {
- return null;
+ return s;
}
@Override
@@ -174,7 +174,7 @@ public InternalScanner preCompactScannerOpen(
final ObserverContext<RegionCoprocessorEnvironment> c, final Store store,
List<? extends KeyValueScanner> scanners, final ScanType scanType, final long earliestPutTs,
final InternalScanner s) throws IOException {
- return null;
+ return s;
}
@Override
@@ -342,7 +342,7 @@ public RegionScanner preScannerOpen(final ObserverContext<RegionCoprocessorEnvir
public KeyValueScanner preStoreScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
final Store store, final Scan scan, final NavigableSet<byte[]> targetCols,
final KeyValueScanner s) throws IOException {
- return null;
+ return s;
}
@Override
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
new file mode 100644
index 000000000000..07033a67803d
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
@@ -0,0 +1,300 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.coprocessor;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.NavigableSet;
+import java.util.concurrent.CountDownLatch;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.FilterBase;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
+import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
+import org.apache.hadoop.hbase.regionserver.ScanType;
+import org.apache.hadoop.hbase.regionserver.Store;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.StoreScanner;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(SmallTests.class)
+public class TestRegionObserverScannerOpenHook {
+ private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
+ static final Path DIR = UTIL.getDataTestDir();
+
+ public static class NoDataFilter extends FilterBase {
+
+ @Override
+ public ReturnCode filterKeyValue(KeyValue ignored) throws IOException {
+ return ReturnCode.SKIP;
+ }
+
+ @Override
+ public boolean filterAllRemaining() throws IOException {
+ return true;
+ }
+
+ @Override
+ public boolean filterRow() throws IOException {
+ return true;
+ }
+ }
+
+ /**
+ * Do the same logic as the {@link BaseRegionObserver}. Needed since {@link BaseRegionObserver} is
+ * an abstract class.
+ */
+ public static class EmptyRegionObsever extends BaseRegionObserver {
+ }
+
+ /**
+ * Don't return any data from a scan by creating a custom {@link StoreScanner}.
+ */
+ public static class NoDataFromScan extends BaseRegionObserver {
+ @Override
+ public KeyValueScanner preStoreScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
+ Store store, Scan scan, NavigableSet<byte[]> targetCols, KeyValueScanner s)
+ throws IOException {
+ scan.setFilter(new NoDataFilter());
+ return new StoreScanner(store, store.getScanInfo(), scan, targetCols);
+ }
+ }
+
+ /**
+ * Don't allow any data in a flush by creating a custom {@link StoreScanner}.
+ */
+ public static class NoDataFromFlush extends BaseRegionObserver {
+ @Override
+ public InternalScanner preFlushScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
+ Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
+ Scan scan = new Scan();
+ scan.setFilter(new NoDataFilter());
+ return new StoreScanner(store, store.getScanInfo(), scan,
+ Collections.singletonList(memstoreScanner), ScanType.COMPACT_RETAIN_DELETES,
+ store.getSmallestReadPoint(), HConstants.OLDEST_TIMESTAMP);
+ }
+ }
+
+ /**
+ * Don't allow any data to be written out in the compaction by creating a custom
+ * {@link StoreScanner}.
+ */
+ public static class NoDataFromCompaction extends BaseRegionObserver {
+ @Override
+ public InternalScanner preCompactScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c,
+ Store store, List<? extends KeyValueScanner> scanners, ScanType scanType,
+ long earliestPutTs, InternalScanner s) throws IOException {
+ Scan scan = new Scan();
+ scan.setFilter(new NoDataFilter());
+ return new StoreScanner(store, store.getScanInfo(), scan, scanners,
+ ScanType.COMPACT_RETAIN_DELETES, store.getSmallestReadPoint(),
+ HConstants.OLDEST_TIMESTAMP);
+ }
+ }
+
+ HRegion initHRegion(byte[] tableName, String callingMethod, Configuration conf,
+ byte[]... families) throws IOException {
+ HTableDescriptor htd = new HTableDescriptor(tableName);
+ for (byte[] family : families) {
+ htd.addFamily(new HColumnDescriptor(family));
+ }
+ HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
+ Path path = new Path(DIR + callingMethod);
+ HRegion r = HRegion.createHRegion(info, path, conf, htd);
+ // this following piece is a hack. currently a coprocessorHost
+ // is secretly loaded at OpenRegionHandler. we don't really
+ // start a region server here, so just manually create cphost
+ // and set it to region.
+ RegionCoprocessorHost host = new RegionCoprocessorHost(r, null, conf);
+ r.setCoprocessorHost(host);
+ return r;
+ }
+
+ @Test
+ public void testRegionObserverScanTimeStacking() throws Exception {
+ byte[] ROW = Bytes.toBytes("testRow");
+ byte[] TABLE = Bytes.toBytes(getClass().getName());
+ byte[] A = Bytes.toBytes("A");
+ byte[][] FAMILIES = new byte[][] { A };
+
+ Configuration conf = HBaseConfiguration.create();
+ HRegion region = initHRegion(TABLE, getClass().getName(), conf, FAMILIES);
+ RegionCoprocessorHost h = region.getCoprocessorHost();
+ h.load(NoDataFromScan.class, Coprocessor.PRIORITY_HIGHEST, conf);
+ h.load(EmptyRegionObsever.class, Coprocessor.PRIORITY_USER, conf);
+
+ Put put = new Put(ROW);
+ put.add(A, A, A);
+ region.put(put);
+
+ Get get = new Get(ROW);
+ Result r = region.get(get);
+ assertNull(
+ "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor. Found: "
+ + r, r.list());
+ }
+
+ @Test
+ public void testRegionObserverFlushTimeStacking() throws Exception {
+ byte[] ROW = Bytes.toBytes("testRow");
+ byte[] TABLE = Bytes.toBytes(getClass().getName());
+ byte[] A = Bytes.toBytes("A");
+ byte[][] FAMILIES = new byte[][] { A };
+
+ Configuration conf = HBaseConfiguration.create();
+ HRegion region = initHRegion(TABLE, getClass().getName(), conf, FAMILIES);
+ RegionCoprocessorHost h = region.getCoprocessorHost();
+ h.load(NoDataFromFlush.class, Coprocessor.PRIORITY_HIGHEST, conf);
+ h.load(EmptyRegionObsever.class, Coprocessor.PRIORITY_USER, conf);
+
+ // put a row and flush it to disk
+ Put put = new Put(ROW);
+ put.add(A, A, A);
+ region.put(put);
+ region.flushcache();
+ Get get = new Get(ROW);
+ Result r = region.get(get);
+ assertNull(
+ "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor. Found: "
+ + r, r.list());
+ }
+
+ /**
+ * Unfortunately, the easiest way to test this is to spin up a mini-cluster since we want to do
+ * the usual compaction mechanism on the region, rather than going through the backdoor to the
+ * region
+ */
+ @Test
+ @Category(MediumTests.class)
+ public void testRegionObserverCompactionTimeStacking() throws Exception {
+ // setup a mini cluster so we can do a real compaction on a region
+ Configuration conf = UTIL.getConfiguration();
+ conf.setInt("hbase.hstore.compaction.min", 2);
+ UTIL.startMiniCluster();
+ String tableName = "testRegionObserverCompactionTimeStacking";
+ byte[] ROW = Bytes.toBytes("testRow");
+ byte[] A = Bytes.toBytes("A");
+ HTableDescriptor desc = new HTableDescriptor(tableName);
+ desc.addFamily(new HColumnDescriptor(A));
+ desc.addCoprocessor(EmptyRegionObsever.class.getName(), null, Coprocessor.PRIORITY_USER, null);
+ desc.addCoprocessor(NoDataFromCompaction.class.getName(), null, Coprocessor.PRIORITY_HIGHEST,
+ null);
+
+ HBaseAdmin admin = UTIL.getHBaseAdmin();
+ admin.createTable(desc);
+
+ HTable table = new HTable(conf, desc.getName());
+
+ // put a row and flush it to disk
+ Put put = new Put(ROW);
+ put.add(A, A, A);
+ table.put(put);
+ table.flushCommits();
+
+ HRegionServer rs = UTIL.getRSForFirstRegionInTable(desc.getName());
+ List<HRegion> regions = rs.getOnlineRegions(desc.getName());
+ assertEquals("More than 1 region serving test table with 1 row", 1, regions.size());
+ HRegion region = regions.get(0);
+ admin.flush(region.getRegionName());
+
+ // put another row and flush that too
+ put = new Put(Bytes.toBytes("anotherrow"));
+ put.add(A, A, A);
+ table.put(put);
+ table.flushCommits();
+ admin.flush(region.getRegionName());
+
+    // run a compaction, which normally should get rid of the data
+ Store s = region.getStores().get(A);
+ CountDownLatch latch = new CountDownLatch(1);
+ WaitableCompactionRequest request = new WaitableCompactionRequest(s.getStorefiles(), latch);
+ rs.compactSplitThread.requestCompaction(region, s,
+ "compact for testRegionObserverCompactionTimeStacking", Store.PRIORITY_USER, request);
+ // wait for the compaction to complete
+ latch.await();
+
+ // check both rows to ensure that they aren't there
+ Get get = new Get(ROW);
+ Result r = table.get(get);
+ assertNull(
+ "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor. Found: "
+ + r, r.list());
+
+ get = new Get(Bytes.toBytes("anotherrow"));
+ r = table.get(get);
+ assertNull(
+ "Got an unexpected number of rows - no data should be returned with the NoDataFromScan coprocessor Found: "
+ + r, r.list());
+
+ table.close();
+ UTIL.shutdownMiniCluster();
+ }
+
+ /**
+ * A simple compaction on which you can wait for the passed in latch until the compaction finishes
+ * (either successfully or if it failed).
+ */
+ public static class WaitableCompactionRequest extends CompactionRequest {
+ private CountDownLatch done;
+
+ /**
+ * Constructor for a custom compaction. Uses the setXXX methods to update the state of the
+ * compaction before being used.
+ */
+ public WaitableCompactionRequest(Collection<StoreFile> files, CountDownLatch finished) {
+ super(files);
+ this.done = finished;
+ }
+
+ @Override
+ public void afterExecute() {
+ this.done.countDown();
+ }
+ }
+}
\ No newline at end of file
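The hook changes above matter because region observer hooks are chained: each coprocessor's pre*ScannerOpen receives the scanner produced by the previous coprocessor, and a default implementation that returns null silently discards whatever an earlier, higher-priority coprocessor installed. Here is a small generic Java sketch of that chaining contract; the interface and names are illustrative, not the HBase API.

import java.util.Arrays;
import java.util.List;

class HookChainSketch {
    // Illustrative stand-in for a pre*ScannerOpen hook: it receives the scanner
    // chosen so far and returns the scanner to use next.
    interface ScannerHook {
        Object preScannerOpen(Object current);
    }

    static Object runChain(List<ScannerHook> hooksInPriorityOrder) {
        Object scanner = null;
        for (ScannerHook hook : hooksInPriorityOrder) {
            // A no-op hook that returned null here would erase the custom scanner
            // installed by an earlier hook; returning its input preserves it.
            scanner = hook.preScannerOpen(scanner);
        }
        return scanner;
    }

    public static void main(String[] args) {
        ScannerHook custom = current -> "customScanner"; // installs its own scanner
        ScannerHook noop = current -> current;           // pass-through, as in the fix
        System.out.println(runChain(Arrays.asList(custom, noop))); // prints customScanner
    }
}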
|
e1ab934e5d80c32b63db7569f76b12375fe2a6f7
|
hbase
|
HBASE-798 Provide Client API to explicitly lock and unlock rows (Jonathan Gray via Jim Kellerman). git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@685391 13f79535-47bb-0310-9956-ffa450edef68
|
a
|
https://github.com/apache/hbase
|
diff --git a/CHANGES.txt b/CHANGES.txt
index becddd4e9cfb..216aac8c4fb9 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -32,6 +32,8 @@ Release 0.3.0 - Unreleased
NEW FEATURES
HBASE-787 Postgresql to HBase table replication example (Tim Sell via Stack)
+ HBASE-798 Provide Client API to explicitly lock and unlock rows (Jonathan
+ Gray via Jim Kellerman)
OPTIMIZATIONS
diff --git a/src/java/org/apache/hadoop/hbase/HMerge.java b/src/java/org/apache/hadoop/hbase/HMerge.java
index f43b35e1fc5b..f87b6c7d5778 100644
--- a/src/java/org/apache/hadoop/hbase/HMerge.java
+++ b/src/java/org/apache/hadoop/hbase/HMerge.java
@@ -373,7 +373,7 @@ protected void updateMeta(final byte [] oldRegion1,
b.delete(COL_STARTCODE);
b.delete(COL_SPLITA);
b.delete(COL_SPLITB);
- root.batchUpdate(b);
+ root.batchUpdate(b,null);
if(LOG.isDebugEnabled()) {
LOG.debug("updated columns in row: " + regionsToDelete[r]);
@@ -383,7 +383,7 @@ protected void updateMeta(final byte [] oldRegion1,
newInfo.setOffline(true);
BatchUpdate b = new BatchUpdate(newRegion.getRegionName());
b.put(COL_REGIONINFO, Writables.getBytes(newInfo));
- root.batchUpdate(b);
+ root.batchUpdate(b,null);
if(LOG.isDebugEnabled()) {
LOG.debug("updated columns in row: " + newRegion.getRegionName());
}
diff --git a/src/java/org/apache/hadoop/hbase/client/HTable.java b/src/java/org/apache/hadoop/hbase/client/HTable.java
index 6d086e91fc4e..676da7b6b350 100644
--- a/src/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/src/java/org/apache/hadoop/hbase/client/HTable.java
@@ -639,12 +639,33 @@ public RowResult getRow(final String row, final String [] columns,
*/
public RowResult getRow(final byte [] row, final byte [][] columns,
final long ts)
+ throws IOException {
+ return getRow(row,columns,ts,null);
+ }
+
+ /**
+ * Get selected columns for the specified row at a specified timestamp
+ * using existing row lock.
+ *
+ * @param row row key
+ * @param columns Array of column names and families you want to retrieve.
+ * @param ts timestamp
+ * @param rl row lock
+ * @return RowResult is empty if row does not exist.
+ * @throws IOException
+ */
+ public RowResult getRow(final byte [] row, final byte [][] columns,
+ final long ts, final RowLock rl)
throws IOException {
return connection.getRegionServerWithRetries(
new ServerCallable<RowResult>(connection, tableName, row) {
public RowResult call() throws IOException {
+ long lockId = -1L;
+ if(rl != null) {
+ lockId = rl.getLockId();
+ }
return server.getRow(location.getRegionInfo().getRegionName(), row,
- columns, ts);
+ columns, ts, lockId);
}
}
);
@@ -1103,16 +1124,36 @@ public void deleteAll(final String row, final String column, final long ts)
* @throws IOException
*/
public void deleteAll(final byte [] row, final byte [] column, final long ts)
+ throws IOException {
+ deleteAll(row,column,ts,null);
+ }
+
+ /**
+ * Delete all cells that match the passed row and column and whose
+ * timestamp is equal-to or older than the passed timestamp, using an
+ * existing row lock.
+ * @param row Row to update
+ * @param column name of column whose value is to be deleted
+ * @param ts Delete all cells of the same timestamp or older.
+ * @param rl Existing row lock
+ * @throws IOException
+ */
+ public void deleteAll(final byte [] row, final byte [] column, final long ts,
+ final RowLock rl)
throws IOException {
connection.getRegionServerWithRetries(
new ServerCallable<Boolean>(connection, tableName, row) {
public Boolean call() throws IOException {
+ long lockId = -1L;
+ if(rl != null) {
+ lockId = rl.getLockId();
+ }
if (column != null) {
this.server.deleteAll(location.getRegionInfo().getRegionName(),
- row, column, ts);
+ row, column, ts, lockId);
} else {
this.server.deleteAll(location.getRegionInfo().getRegionName(),
- row, ts);
+ row, ts, lockId);
}
return null;
}
@@ -1160,12 +1201,32 @@ public void deleteFamily(final String row, final String family,
*/
public void deleteFamily(final byte [] row, final byte [] family,
final long timestamp)
+ throws IOException {
+ deleteFamily(row,family,timestamp,null);
+ }
+
+ /**
+ * Delete all cells for a row with matching column family with timestamps
+ * less than or equal to <i>timestamp</i>, using existing row lock.
+ *
+ * @param row The row to operate on
+ * @param family The column family to match
+ * @param timestamp Timestamp to match
+ * @param rl Existing row lock
+ * @throws IOException
+ */
+ public void deleteFamily(final byte [] row, final byte [] family,
+ final long timestamp, final RowLock rl)
throws IOException {
connection.getRegionServerWithRetries(
new ServerCallable<Boolean>(connection, tableName, row) {
public Boolean call() throws IOException {
+ long lockId = -1L;
+ if(rl != null) {
+ lockId = rl.getLockId();
+ }
server.deleteFamily(location.getRegionInfo().getRegionName(), row,
- family, timestamp);
+ family, timestamp, lockId);
return null;
}
}
@@ -1178,12 +1239,28 @@ public Boolean call() throws IOException {
* @throws IOException
*/
public synchronized void commit(final BatchUpdate batchUpdate)
+ throws IOException {
+ commit(batchUpdate,null);
+ }
+
+ /**
+ * Commit a BatchUpdate to the table using existing row lock.
+ * @param batchUpdate
+ * @param rl Existing row lock
+ * @throws IOException
+ */
+ public synchronized void commit(final BatchUpdate batchUpdate,
+ final RowLock rl)
throws IOException {
connection.getRegionServerWithRetries(
new ServerCallable<Boolean>(connection, tableName, batchUpdate.getRow()) {
public Boolean call() throws IOException {
+ long lockId = -1L;
+ if(rl != null) {
+ lockId = rl.getLockId();
+ }
server.batchUpdate(location.getRegionInfo().getRegionName(),
- batchUpdate);
+ batchUpdate, lockId);
return null;
}
}
@@ -1198,7 +1275,45 @@ public Boolean call() throws IOException {
public synchronized void commit(final List<BatchUpdate> batchUpdates)
throws IOException {
for (BatchUpdate batchUpdate : batchUpdates)
- commit(batchUpdate);
+ commit(batchUpdate,null);
+ }
+
+ /**
+ * Obtain a row lock
+ * @param row The row to lock
+ * @return rowLock RowLock containing row and lock id
+ * @throws IOException
+ */
+ public RowLock lockRow(final byte [] row)
+ throws IOException {
+ return connection.getRegionServerWithRetries(
+ new ServerCallable<RowLock>(connection, tableName, row) {
+ public RowLock call() throws IOException {
+ long lockId =
+ server.lockRow(location.getRegionInfo().getRegionName(), row);
+ RowLock rowLock = new RowLock(row,lockId);
+ return rowLock;
+ }
+ }
+ );
+ }
+
+ /**
+ * Release a row lock
+ * @param rl The row lock to release
+ * @throws IOException
+ */
+ public void unlockRow(final RowLock rl)
+ throws IOException {
+ connection.getRegionServerWithRetries(
+ new ServerCallable<Boolean>(connection, tableName, rl.getRow()) {
+ public Boolean call() throws IOException {
+ server.unlockRow(location.getRegionInfo().getRegionName(),
+ rl.getLockId());
+ return null;
+ }
+ }
+ );
}
/**
diff --git a/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java b/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
index 0aed6af99111..46c6afc3d65e 100644
--- a/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
+++ b/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
@@ -111,11 +111,12 @@ public RowResult getClosestRowBefore(final byte [] regionName,
*
* @param regionName region name
* @param row row key
+ * @param lockId lock id
* @return map of values
* @throws IOException
*/
public RowResult getRow(final byte [] regionName, final byte [] row,
- final byte[][] columns, final long ts)
+ final byte[][] columns, final long ts, final long lockId)
throws IOException;
/**
@@ -123,9 +124,11 @@ public RowResult getRow(final byte [] regionName, final byte [] row,
*
* @param regionName name of the region to update
* @param b BatchUpdate
+ * @param lockId lock id
* @throws IOException
*/
- public void batchUpdate(final byte [] regionName, final BatchUpdate b)
+ public void batchUpdate(final byte [] regionName, final BatchUpdate b,
+ final long lockId)
throws IOException;
/**
@@ -136,10 +139,11 @@ public void batchUpdate(final byte [] regionName, final BatchUpdate b)
* @param row row key
* @param column column key
* @param timestamp Delete all entries that have this timestamp or older
+ * @param lockId lock id
* @throws IOException
*/
public void deleteAll(byte [] regionName, byte [] row, byte [] column,
- long timestamp)
+ long timestamp, long lockId)
throws IOException;
/**
@@ -149,9 +153,11 @@ public void deleteAll(byte [] regionName, byte [] row, byte [] column,
* @param regionName region name
* @param row row key
* @param timestamp Delete all entries that have this timestamp or older
+ * @param lockId lock id
* @throws IOException
*/
- public void deleteAll(byte [] regionName, byte [] row, long timestamp)
+ public void deleteAll(byte [] regionName, byte [] row, long timestamp,
+ long lockId)
throws IOException;
/**
@@ -162,9 +168,10 @@ public void deleteAll(byte [] regionName, byte [] row, long timestamp)
* @param row The row to operate on
* @param family The column family to match
* @param timestamp Timestamp to match
+ * @param lockId lock id
*/
public void deleteFamily(byte [] regionName, byte [] row, byte [] family,
- long timestamp)
+ long timestamp, long lockId)
throws IOException;
@@ -207,4 +214,24 @@ public long openScanner(final byte [] regionName, final byte [][] columns,
* @throws IOException
*/
public void close(long scannerId) throws IOException;
+
+ /**
+ * Opens a remote row lock.
+ *
+ * @param regionName name of region
+ * @param row row to lock
+ * @return lockId lock identifier
+ * @throws IOException
+ */
+ public long lockRow(final byte [] regionName, final byte [] row)
+ throws IOException;
+
+ /**
+ * Releases a remote row lock.
+ *
+ * @param lockId the lock id returned by lockRow
+ * @throws IOException
+ */
+ public void unlockRow(final byte [] regionName, final long lockId)
+ throws IOException;
}
\ No newline at end of file
diff --git a/src/java/org/apache/hadoop/hbase/master/BaseScanner.java b/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
index 22d900369a8c..5f17ea2b62f8 100644
--- a/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
+++ b/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
@@ -332,7 +332,7 @@ public boolean accept(Path path) {
BatchUpdate b = new BatchUpdate(parent);
b.delete(splitColumn);
- srvr.batchUpdate(metaRegionName, b);
+ srvr.batchUpdate(metaRegionName, b, -1L);
return result;
}
diff --git a/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java b/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java
index 48670a56caf6..4bea278d7945 100644
--- a/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java
+++ b/src/java/org/apache/hadoop/hbase/master/ChangeTableState.java
@@ -91,7 +91,7 @@ protected void postProcessMeta(MetaRegion m, HRegionInterface server)
updateRegionInfo(b, i);
b.delete(COL_SERVER);
b.delete(COL_STARTCODE);
- server.batchUpdate(m.getRegionName(), b);
+ server.batchUpdate(m.getRegionName(), b, -1L);
if (LOG.isDebugEnabled()) {
LOG.debug("updated columns in row: " + i.getRegionNameAsString());
}
diff --git a/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java b/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java
index dae8405949ee..94d31ad46918 100644
--- a/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java
+++ b/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java
@@ -52,7 +52,7 @@ protected void updateRegionInfo(HRegionInterface server, byte [] regionName,
throws IOException {
BatchUpdate b = new BatchUpdate(i.getRegionName());
b.put(COL_REGIONINFO, Writables.getBytes(i));
- server.batchUpdate(regionName, b);
+ server.batchUpdate(regionName, b, -1L);
if (LOG.isDebugEnabled()) {
LOG.debug("updated columns in row: " + i.getRegionNameAsString());
}
diff --git a/src/java/org/apache/hadoop/hbase/master/ModifyTableMeta.java b/src/java/org/apache/hadoop/hbase/master/ModifyTableMeta.java
index c7a15999d49a..ee1b915fdafb 100644
--- a/src/java/org/apache/hadoop/hbase/master/ModifyTableMeta.java
+++ b/src/java/org/apache/hadoop/hbase/master/ModifyTableMeta.java
@@ -52,7 +52,7 @@ protected void updateRegionInfo(HRegionInterface server, byte [] regionName,
throws IOException {
BatchUpdate b = new BatchUpdate(i.getRegionName());
b.put(COL_REGIONINFO, Writables.getBytes(i));
- server.batchUpdate(regionName, b);
+ server.batchUpdate(regionName, b, -1L);
LOG.debug("updated HTableDescriptor for region " + i.getRegionNameAsString());
}
diff --git a/src/java/org/apache/hadoop/hbase/master/ProcessRegionOpen.java b/src/java/org/apache/hadoop/hbase/master/ProcessRegionOpen.java
index aecced363058..022c03dc7291 100644
--- a/src/java/org/apache/hadoop/hbase/master/ProcessRegionOpen.java
+++ b/src/java/org/apache/hadoop/hbase/master/ProcessRegionOpen.java
@@ -83,7 +83,7 @@ public Boolean call() throws IOException {
BatchUpdate b = new BatchUpdate(regionInfo.getRegionName());
b.put(COL_SERVER, Bytes.toBytes(serverAddress.toString()));
b.put(COL_STARTCODE, startCode);
- server.batchUpdate(metaRegionName, b);
+ server.batchUpdate(metaRegionName, b, -1L);
if (!this.historian.isOnline()) {
// This is safest place to do the onlining of the historian in
// the master. When we get to here, we know there is a .META.
diff --git a/src/java/org/apache/hadoop/hbase/master/RegionManager.java b/src/java/org/apache/hadoop/hbase/master/RegionManager.java
index be76b8b33f63..919d4d33ffe6 100644
--- a/src/java/org/apache/hadoop/hbase/master/RegionManager.java
+++ b/src/java/org/apache/hadoop/hbase/master/RegionManager.java
@@ -560,7 +560,7 @@ public void createRegion(HRegionInfo newRegion, HRegionInterface server,
byte [] regionName = region.getRegionName();
BatchUpdate b = new BatchUpdate(regionName);
b.put(COL_REGIONINFO, Writables.getBytes(info));
- server.batchUpdate(metaRegionName, b);
+ server.batchUpdate(metaRegionName, b, -1L);
// 4. Close the new region to flush it to disk. Close its log file too.
region.close();
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 3363317f4a37..a15a7bc68f15 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -1,4 +1,4 @@
-/**
+ /**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
@@ -1170,7 +1170,7 @@ public Cell[] get(byte [] row, byte [] column, long timestamp,
* @throws IOException
*/
public Map<byte [], Cell> getFull(final byte [] row,
- final Set<byte []> columns, final long ts)
+ final Set<byte []> columns, final long ts, final Integer lockid)
throws IOException {
// Check columns passed
if (columns != null) {
@@ -1179,7 +1179,7 @@ public Cell[] get(byte [] row, byte [] column, long timestamp,
}
}
HStoreKey key = new HStoreKey(row, ts);
- Integer lid = obtainRowLock(row);
+ Integer lid = getLock(lockid,row);
HashSet<HStore> storeSet = new HashSet<HStore>();
try {
TreeMap<byte [], Cell> result =
@@ -1215,7 +1215,7 @@ public Cell[] get(byte [] row, byte [] column, long timestamp,
return result;
} finally {
- releaseRowLock(lid);
+ if(lockid == null) releaseRowLock(lid);
}
}
@@ -1347,7 +1347,7 @@ public InternalScanner getScanner(byte[][] cols, byte [] firstRow,
* @param b
* @throws IOException
*/
- public void batchUpdate(BatchUpdate b)
+ public void batchUpdate(BatchUpdate b, Integer lockid)
throws IOException {
checkReadOnly();
@@ -1363,7 +1363,8 @@ public void batchUpdate(BatchUpdate b)
// See HRegionServer#RegionListener for how the expire on HRegionServer
// invokes a HRegion#abort.
byte [] row = b.getRow();
- Integer lid = obtainRowLock(row);
+ // If we did not pass an existing row lock, obtain a new one
+ Integer lid = getLock(lockid,row);
long commitTime = (b.getTimestamp() == LATEST_TIMESTAMP) ?
System.currentTimeMillis() : b.getTimestamp();
try {
@@ -1408,7 +1409,7 @@ public void batchUpdate(BatchUpdate b)
this.targetColumns.remove(Long.valueOf(lid));
throw e;
} finally {
- releaseRowLock(lid);
+ if(lockid == null) releaseRowLock(lid);
}
}
@@ -1458,17 +1459,19 @@ private synchronized void doBlocking() {
* @param row
* @param column
* @param ts Delete all entries that have this timestamp or older
+ * @param lockid Row lock
* @throws IOException
*/
- public void deleteAll(final byte [] row, final byte [] column, final long ts)
+ public void deleteAll(final byte [] row, final byte [] column, final long ts,
+ final Integer lockid)
throws IOException {
checkColumn(column);
checkReadOnly();
- Integer lid = obtainRowLock(row);
+ Integer lid = getLock(lockid,row);
try {
deleteMultiple(row, column, ts, ALL_VERSIONS);
} finally {
- releaseRowLock(lid);
+ if(lockid == null) releaseRowLock(lid);
}
}
@@ -1476,12 +1479,14 @@ public void deleteAll(final byte [] row, final byte [] column, final long ts)
* Delete all cells of the same age as the passed timestamp or older.
* @param row
* @param ts Delete all entries that have this timestamp or older
+ * @param lockid Row lock
* @throws IOException
*/
- public void deleteAll(final byte [] row, final long ts)
+ public void deleteAll(final byte [] row, final long ts,
+ final Integer lockid)
throws IOException {
checkReadOnly();
- Integer lid = obtainRowLock(row);
+ Integer lid = getLock(lockid,row);
try {
for (HStore store : stores.values()){
List<HStoreKey> keys = store.getKeys(new HStoreKey(row, ts),
@@ -1493,7 +1498,7 @@ public void deleteAll(final byte [] row, final long ts)
update(edits);
}
} finally {
- releaseRowLock(lid);
+ if(lockid == null) releaseRowLock(lid);
}
}
@@ -1504,12 +1509,14 @@ public void deleteAll(final byte [] row, final long ts)
* @param row The row to operate on
* @param family The column family to match
* @param timestamp Timestamp to match
+ * @param lockid Row lock
* @throws IOException
*/
- public void deleteFamily(byte [] row, byte [] family, long timestamp)
+ public void deleteFamily(byte [] row, byte [] family, long timestamp,
+ final Integer lockid)
throws IOException{
checkReadOnly();
- Integer lid = obtainRowLock(row);
+ Integer lid = getLock(lockid,row);
try {
// find the HStore for the column family
HStore store = getStore(family);
@@ -1522,7 +1529,7 @@ public void deleteFamily(byte [] row, byte [] family, long timestamp)
}
update(edits);
} finally {
- releaseRowLock(lid);
+ if(lockid == null) releaseRowLock(lid);
}
}
@@ -1552,7 +1559,7 @@ private void deleteMultiple(final byte [] row, final byte [] column,
update(edits);
}
}
-
+
/**
* @throws IOException Throws exception if region is in read-only mode.
*/
@@ -1778,6 +1785,41 @@ void releaseRowLock(final Integer lockid) {
}
}
+ /**
+ * See if row is currently locked.
+ * @param lockid
+ * @return boolean
+ */
+ private boolean isRowLocked(final Integer lockid) {
+ synchronized (locksToRows) {
+ if(locksToRows.containsKey(lockid)) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+ }
+
+ /**
+ * Returns existing row lock if found, otherwise
+ * obtains a new row lock and returns it.
+ * @param lockid
+ * @return lockid
+ */
+ private Integer getLock(Integer lockid, byte [] row)
+ throws IOException {
+ Integer lid = null;
+ if(lockid == null) {
+ lid = obtainRowLock(row);
+ } else {
+ if(!isRowLocked(lockid)) {
+ throw new IOException("Invalid row lock");
+ }
+ lid = lockid;
+ }
+ return lid;
+ }
+
private void waitOnRowLocks() {
synchronized (locksToRows) {
while (this.locksToRows.size() > 0) {
@@ -2134,7 +2176,8 @@ public static void addRegionToMETA(HRegion meta, HRegion r)
public static void removeRegionFromMETA(final HRegionInterface srvr,
final byte [] metaRegionName, final byte [] regionName)
throws IOException {
- srvr.deleteAll(metaRegionName, regionName, HConstants.LATEST_TIMESTAMP);
+ srvr.deleteAll(metaRegionName, regionName, HConstants.LATEST_TIMESTAMP,
+ (long)-1L);
}
/**
@@ -2155,7 +2198,7 @@ public static void offlineRegionInMETA(final HRegionInterface srvr,
b.delete(COL_STARTCODE);
// If carrying splits, they'll be in place when we show up on new
// server.
- srvr.batchUpdate(metaRegionName, b);
+ srvr.batchUpdate(metaRegionName, b, (long)-1L);
}
/**
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index 85be1d8be543..9024e5987ea4 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -69,6 +69,7 @@
import org.apache.hadoop.hbase.RegionHistorian;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.UnknownScannerException;
+import org.apache.hadoop.hbase.UnknownRowLockException;
import org.apache.hadoop.hbase.Leases.LeaseStillHeldException;
import org.apache.hadoop.hbase.filter.RowFilterInterface;
import org.apache.hadoop.hbase.io.BatchOperation;
@@ -1048,7 +1049,7 @@ public Cell[] get(final byte [] regionName, final byte [] row,
/** {@inheritDoc} */
public RowResult getRow(final byte [] regionName, final byte [] row,
- final byte [][] columns, final long ts)
+ final byte [][] columns, final long ts, final long lockId)
throws IOException {
checkOpen();
requestCount.incrementAndGet();
@@ -1061,7 +1062,8 @@ public RowResult getRow(final byte [] regionName, final byte [] row,
}
HRegion region = getRegion(regionName);
- Map<byte [], Cell> map = region.getFull(row, columnSet, ts);
+ Map<byte [], Cell> map = region.getFull(row, columnSet, ts,
+ getLockFromId(lockId));
HbaseMapWritable<byte [], Cell> result =
new HbaseMapWritable<byte [], Cell>();
result.putAll(map);
@@ -1126,7 +1128,7 @@ public RowResult next(final long scannerId) throws IOException {
}
/** {@inheritDoc} */
- public void batchUpdate(final byte [] regionName, BatchUpdate b)
+ public void batchUpdate(final byte [] regionName, BatchUpdate b, long lockId)
throws IOException {
checkOpen();
this.requestCount.incrementAndGet();
@@ -1134,7 +1136,7 @@ public void batchUpdate(final byte [] regionName, BatchUpdate b)
validateValuesLength(b, region);
try {
cacheFlusher.reclaimMemcacheMemory();
- region.batchUpdate(b);
+ region.batchUpdate(b, getLockFromId(lockId));
} catch (OutOfMemoryError error) {
abort();
LOG.fatal("Ran out of memory", error);
@@ -1239,7 +1241,7 @@ public void close(final long scannerId) throws IOException {
}
Map<String, InternalScanner> scanners =
- Collections.synchronizedMap(new HashMap<String, InternalScanner>());
+ new ConcurrentHashMap<String, InternalScanner>();
/**
* Instantiated as a scanner lease.
@@ -1275,26 +1277,157 @@ public void leaseExpired() {
/** {@inheritDoc} */
public void deleteAll(final byte [] regionName, final byte [] row,
- final byte [] column, final long timestamp)
+ final byte [] column, final long timestamp, final long lockId)
throws IOException {
HRegion region = getRegion(regionName);
- region.deleteAll(row, column, timestamp);
+ region.deleteAll(row, column, timestamp, getLockFromId(lockId));
}
/** {@inheritDoc} */
public void deleteAll(final byte [] regionName, final byte [] row,
- final long timestamp)
+ final long timestamp, final long lockId)
throws IOException {
HRegion region = getRegion(regionName);
- region.deleteAll(row, timestamp);
+ region.deleteAll(row, timestamp, getLockFromId(lockId));
}
/** {@inheritDoc} */
public void deleteFamily(byte [] regionName, byte [] row, byte [] family,
- long timestamp) throws IOException{
- getRegion(regionName).deleteFamily(row, family, timestamp);
+ long timestamp, final long lockId)
+ throws IOException{
+ getRegion(regionName).deleteFamily(row, family, timestamp,
+ getLockFromId(lockId));
}
+ /** {@inheritDoc} */
+ public long lockRow(byte [] regionName, byte [] row)
+ throws IOException {
+ checkOpen();
+ NullPointerException npe = null;
+ if(regionName == null) {
+ npe = new NullPointerException("regionName is null");
+ } else if(row == null) {
+ npe = new NullPointerException("row to lock is null");
+ }
+ if(npe != null) {
+ IOException io = new IOException("Invalid arguments to lockRow");
+ io.initCause(npe);
+ throw io;
+ }
+ requestCount.incrementAndGet();
+ try {
+ HRegion region = getRegion(regionName);
+ Integer r = region.obtainRowLock(row);
+ long lockId = addRowLock(r,region);
+ LOG.debug("Row lock " + lockId + " explicitly acquired by client");
+ return lockId;
+ } catch (IOException e) {
+ LOG.error("Error obtaining row lock (fsOk: " + this.fsOk + ")",
+ RemoteExceptionHandler.checkIOException(e));
+ checkFileSystem();
+ throw e;
+ }
+ }
+
+ protected long addRowLock(Integer r, HRegion region) throws LeaseStillHeldException {
+ long lockId = -1L;
+ lockId = rand.nextLong();
+ String lockName = String.valueOf(lockId);
+ synchronized(rowlocks) {
+ rowlocks.put(lockName, r);
+ }
+ this.leases.
+ createLease(lockName, new RowLockListener(lockName, region));
+ return lockId;
+ }
+
+ /**
+ * Method to get the Integer lock identifier used internally
+ * from the long lock identifier used by the client.
+ * @param lockId long row lock identifier from client
+ * @return intId Integer row lock used internally in HRegion
+ * @throws IOException Thrown if this is not a valid client lock id.
+ */
+ private Integer getLockFromId(long lockId)
+ throws IOException {
+ if(lockId == -1L) {
+ return null;
+ }
+ String lockName = String.valueOf(lockId);
+ Integer rl = null;
+ synchronized(rowlocks) {
+ rl = rowlocks.get(lockName);
+ }
+ if(rl == null) {
+ throw new IOException("Invalid row lock");
+ }
+ this.leases.renewLease(lockName);
+ return rl;
+ }
+
+ /** {@inheritDoc} */
+ public void unlockRow(byte [] regionName, long lockId)
+ throws IOException {
+ checkOpen();
+ NullPointerException npe = null;
+ if(regionName == null) {
+ npe = new NullPointerException("regionName is null");
+ } else if(lockId == -1L) {
+ npe = new NullPointerException("lockId is null");
+ }
+ if(npe != null) {
+ IOException io = new IOException("Invalid arguments to unlockRow");
+ io.initCause(npe);
+ throw io;
+ }
+ requestCount.incrementAndGet();
+ try {
+ HRegion region = getRegion(regionName);
+ String lockName = String.valueOf(lockId);
+ Integer r = null;
+ synchronized(rowlocks) {
+ r = rowlocks.remove(lockName);
+ }
+ if(r == null) {
+ throw new UnknownRowLockException(lockName);
+ }
+ region.releaseRowLock(r);
+ this.leases.cancelLease(lockName);
+ LOG.debug("Row lock " + lockId + " has been explicitly released by client");
+ } catch (IOException e) {
+ checkFileSystem();
+ throw e;
+ }
+ }
+
+ Map<String, Integer> rowlocks =
+ new ConcurrentHashMap<String, Integer>();
+
+ /**
+ * Instantiated as a row lock lease.
+ * If the lease times out, the row lock is released
+ */
+ private class RowLockListener implements LeaseListener {
+ private final String lockName;
+ private final HRegion region;
+
+ RowLockListener(final String lockName, final HRegion region) {
+ this.lockName = lockName;
+ this.region = region;
+ }
+
+ /** {@inheritDoc} */
+ public void leaseExpired() {
+ LOG.info("Row Lock " + this.lockName + " lease expired");
+ Integer r = null;
+ synchronized(rowlocks) {
+ r = rowlocks.remove(this.lockName);
+ }
+ if(r != null) {
+ region.releaseRowLock(r);
+ }
+ }
+ }
/**
* @return Info on this server.
diff --git a/src/java/org/apache/hadoop/hbase/util/Merge.java b/src/java/org/apache/hadoop/hbase/util/Merge.java
index 541fae3ab093..6598770af10a 100644
--- a/src/java/org/apache/hadoop/hbase/util/Merge.java
+++ b/src/java/org/apache/hadoop/hbase/util/Merge.java
@@ -308,7 +308,7 @@ private void removeRegionFromMeta(HRegion meta, HRegionInfo regioninfo)
if (LOG.isDebugEnabled()) {
LOG.debug("Removing region: " + regioninfo + " from " + meta);
}
- meta.deleteAll(regioninfo.getRegionName(), System.currentTimeMillis());
+ meta.deleteAll(regioninfo.getRegionName(), System.currentTimeMillis(), null);
}
/*
diff --git a/src/java/org/apache/hadoop/hbase/util/MetaUtils.java b/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
index c6f8458c465d..b84454447d40 100644
--- a/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
+++ b/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
@@ -407,7 +407,7 @@ public void updateMETARegionInfo(HRegion r, final HRegionInfo hri)
}
BatchUpdate b = new BatchUpdate(hri.getRegionName());
b.put(HConstants.COL_REGIONINFO, Writables.getBytes(hri));
- r.batchUpdate(b);
+ r.batchUpdate(b, null);
if (LOG.isDebugEnabled()) {
HRegionInfo h = Writables.getHRegionInfoOrNull(
r.get(hri.getRegionName(), HConstants.COL_REGIONINFO).getValue());
diff --git a/src/java/org/apache/hadoop/hbase/util/migration/v5/HRegion.java b/src/java/org/apache/hadoop/hbase/util/migration/v5/HRegion.java
index 7efbd7097a2a..d117fca0470b 100644
--- a/src/java/org/apache/hadoop/hbase/util/migration/v5/HRegion.java
+++ b/src/java/org/apache/hadoop/hbase/util/migration/v5/HRegion.java
@@ -2050,7 +2050,8 @@ public static void addRegionToMETA(HRegion meta, HRegion r)
public static void removeRegionFromMETA(final HRegionInterface srvr,
final byte [] metaRegionName, final byte [] regionName)
throws IOException {
- srvr.deleteAll(metaRegionName, regionName, HConstants.LATEST_TIMESTAMP);
+ srvr.deleteAll(metaRegionName, regionName, HConstants.LATEST_TIMESTAMP,
+ -1L);
}
/**
@@ -2071,7 +2072,7 @@ public static void offlineRegionInMETA(final HRegionInterface srvr,
b.delete(COL_STARTCODE);
// If carrying splits, they'll be in place when we show up on new
// server.
- srvr.batchUpdate(metaRegionName, b);
+ srvr.batchUpdate(metaRegionName, b, -1L);
}
/**
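A short usage sketch of the new explicit row-lock client API introduced above. The HTable, RowLock and BatchUpdate calls mirror what this diff adds, while the import paths, column name and surrounding setup are assumptions made for illustration only.

import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.RowLock;
import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.util.Bytes;

public class RowLockUsageSketch {
    // Lock a row, perform several operations under the same lock, then release it.
    static void updateUnderLock(HTable table, byte[] row) throws Exception {
        RowLock lock = table.lockRow(row);               // acquire an explicit row lock
        try {
            BatchUpdate update = new BatchUpdate(row);
            update.put(Bytes.toBytes("info:value"), Bytes.toBytes("v1"));
            table.commit(update, lock);                  // commit reusing the held lock
        } finally {
            table.unlockRow(lock);                       // release, or the lease eventually expires
        }
    }
}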
|
6aa31e87bc18b613d66eef751f5481d161ee321d
|
orientdb
|
Unneeded index flush was removed
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadataDefault.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadataDefault.java
index 0d09ad52126..a75938889fe 100755
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadataDefault.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/OMetadataDefault.java
@@ -150,12 +150,6 @@ public OSecurityShared call() {
security = instance;
instance.load();
}
-
- // if (instance.getAllRoles().isEmpty()) {
- // OLogManager.instance().error(this, "No security has been installed, create default users and roles");
- // security.repair();
- // }
-
return instance;
}
}), database);
@@ -198,14 +192,10 @@ public void reload() {
* Closes internal objects
*/
public void close() {
- if (indexManager != null)
- indexManager.flush();
if (schema != null)
schema.close();
if (security != null)
security.close();
- // if (functionLibrary != null)
- // functionLibrary.close();
}
protected ODatabaseRecord getDatabase() {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java
index 80bc4dc4439..e8715e6d73d 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java
@@ -15,15 +15,8 @@
*/
package com.orientechnologies.orient.core.sql;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
import java.util.Map.Entry;
-import java.util.Set;
import com.orientechnologies.common.util.OPair;
import com.orientechnologies.orient.core.command.OCommandRequest;
@@ -155,9 +148,15 @@ public Object execute(final Map<Object, Object> iArgs) {
parameters = new OCommandParameters(iArgs);
+ Map<Object, Object> queryArgs = new HashMap<Object, Object>();
+ for (int i = parameterCounter; i < parameters.size(); i++) {
+ if (parameters.getByName(i) != null)
+ queryArgs.put(i - parameterCounter, parameters.getByName(i));
+ }
+
query.setUseCache(false);
query.setContext(context);
- getDatabase().query(query, iArgs);
+ getDatabase().query(query, queryArgs);
return recordCount;
}
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java
index ecd04665490..45b58c51ad0 100755
--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java
@@ -155,7 +155,8 @@ protected void onAfterRequest() throws IOException {
if (connection != null) {
if (connection.database != null)
- connection.database.getLevel1Cache().clear();
+ if (!connection.database.isClosed())
+ connection.database.getLevel1Cache().clear();
connection.data.lastCommandExecutionTime = System.currentTimeMillis() - connection.data.lastCommandReceived;
connection.data.totalCommandExecutionTime += connection.data.lastCommandExecutionTime;
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLUpdateTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLUpdateTest.java
index 6039ab4ea47..9db222d74a4 100644
--- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLUpdateTest.java
+++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLUpdateTest.java
@@ -15,12 +15,7 @@
*/
package com.orientechnologies.orient.test.database.auto;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
import org.testng.Assert;
import org.testng.annotations.Parameters;
@@ -252,25 +247,6 @@ public void updateWithWildcardsOnSetAndWhere() {
database.close();
}
- @Test
- public void updateWithNamedParameters(){
- database.open("admin", "admin");
- ODocument doc = new ODocument("Person");
- doc.field("name", "Caf");
- doc.field("city", "Torino");
- doc.field("gender", "fmale");
- doc.save();
- OCommandSQL updatecommand = new OCommandSQL("update Person set gender = :gender , city = :city where name = :name");
- Map<String,Object> params = new HashMap<String, Object>();
- params.put("gender", "f");
- params.put("city", "TOR");
- params.put("name", "Caf");
- database.command(updatecommand).execute(params);
- checkUpdatedDoc(database, "Caf", "TOR", "f");
-
- database.close();
- }
-
public void updateIncrement() {
database.open("admin", "admin");
diff --git a/tests/src/test/resources/orientdb-server-config.xml b/tests/src/test/resources/orientdb-server-config.xml
index 29c97a68ff2..dd10ae68e0f 100644
--- a/tests/src/test/resources/orientdb-server-config.xml
+++ b/tests/src/test/resources/orientdb-server-config.xml
@@ -25,16 +25,22 @@
</handlers>
<network>
<protocols>
- <protocol implementation="com.orientechnologies.orient.server.network.protocol.binary.ONetworkProtocolBinary" name="binary"/>
- <protocol implementation="com.orientechnologies.orient.server.network.protocol.http.ONetworkProtocolHttpDb" name="http"/>
+ <protocol
+ implementation="com.orientechnologies.orient.server.network.protocol.binary.ONetworkProtocolBinary"
+ name="binary"/>
+ <protocol implementation="com.orientechnologies.orient.server.network.protocol.http.ONetworkProtocolHttpDb"
+ name="http"/>
</protocols>
<listeners>
<listener protocol="binary" port-range="2424-2430" ip-address="0.0.0.0"/>
<listener protocol="http" port-range="2480-2490" ip-address="0.0.0.0">
<commands>
- <command implementation="com.orientechnologies.orient.server.network.protocol.http.command.get.OServerCommandGetStaticContent" pattern="GET|www GET|studio/ GET| GET|*.htm GET|*.html GET|*.xml GET|*.jpeg GET|*.jpg GET|*.png GET|*.gif GET|*.js GET|*.css GET|*.swf GET|*.ico GET|*.txt GET|*.otf GET|*.pjs GET|*.svg">
+ <command
+ implementation="com.orientechnologies.orient.server.network.protocol.http.command.get.OServerCommandGetStaticContent"
+ pattern="GET|www GET|studio/ GET| GET|*.htm GET|*.html GET|*.xml GET|*.jpeg GET|*.jpg GET|*.png GET|*.gif GET|*.js GET|*.css GET|*.swf GET|*.ico GET|*.txt GET|*.otf GET|*.pjs GET|*.svg">
<parameters>
- <entry value="Cache-Control: no-cache, no-store, max-age=0, must-revalidate\r\nPragma: no-cache" name="http.cache:*.htm *.html"/>
+ <entry value="Cache-Control: no-cache, no-store, max-age=0, must-revalidate\r\nPragma: no-cache"
+ name="http.cache:*.htm *.html"/>
<entry value="Cache-Control: max-age=120" name="http.cache:default"/>
</parameters>
</command>
@@ -52,7 +58,11 @@
<user resources="connect,server.listDatabases" password="guest" name="guest"/>
</users>
<properties>
- <entry value="info" name="log.console.level"/>
- <entry value="fine" name="log.file.level"/>
+ <entry name="cache.level2.enabled" value="false"/>
+ <entry name="cache.level2.size" value="0"/>
+ <entry name="cache.level1.enabled" value="false"/>
+ <entry name="cache.level1.size" value="0"/>
+ <entry name="log.console.level" value="info"/>
+ <entry name="log.file.level" value="fine"/>
</properties>
</orient-server>
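Besides removing the index flush, the OCommandExecutorSQLUpdate change above appears to re-number the positional arguments left over after the SET clause so the nested query sees them starting from index 0. A minimal sketch of that re-indexing step, with illustrative names, follows.

import java.util.HashMap;
import java.util.Map;

class ParamShiftSketch {
    // Skip the arguments already consumed by the SET part and re-base the
    // remaining positional arguments at zero for the nested query.
    static Map<Object, Object> shift(Map<Object, Object> args, int consumed, int total) {
        Map<Object, Object> queryArgs = new HashMap<Object, Object>();
        for (int i = consumed; i < total; i++) {
            Object value = args.get(i);
            if (value != null)
                queryArgs.put(i - consumed, value); // re-based index
        }
        return queryArgs;
    }
}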
|
4f79b07e174ed1f57115a6b0a9f6a6e74e6733ee
|
hadoop
|
HADOOP-6932. Namenode start (init) fails because of invalid kerberos key, even when security set to "simple". git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@991030 13f79535-47bb-0310-9956-ffa450edef68
|
c
|
https://github.com/apache/hadoop
|
diff --git a/CHANGES.txt b/CHANGES.txt
index f43935c87233a..72a1e3e6ffa26 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -220,6 +220,9 @@ Trunk (unreleased changes)
HADOOP-6833. IPC leaks call parameters when exceptions thrown.
(Todd Lipcon via Eli Collins)
+ HADOOP-6932. Namenode start (init) fails because of invalid kerberos
+ key, even when security set to "simple" (boryas)
+
Release 0.21.0 - Unreleased
INCOMPATIBLE CHANGES
diff --git a/src/java/org/apache/hadoop/security/SecurityUtil.java b/src/java/org/apache/hadoop/security/SecurityUtil.java
index 00187bd6f2401..44ef31ef32989 100644
--- a/src/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/src/java/org/apache/hadoop/security/SecurityUtil.java
@@ -174,7 +174,7 @@ static String getLocalHostName() throws UnknownHostException {
}
/**
- * If a keytab has been provided, login as that user. Substitute $host in
+ * Login as a principal specified in config. Substitute $host in
* user's Kerberos principal name with a dynamically looked-up fully-qualified
* domain name of the current host.
*
@@ -192,8 +192,9 @@ public static void login(final Configuration conf,
}
/**
- * If a keytab has been provided, login as that user. Substitute $host in
- * user's Kerberos principal name with hostname.
+ * Login as a principal specified in config. Substitute $host in user's Kerberos principal
+ * name with hostname. If non-secure mode - return. If no keytab available -
+ * bail out with an exception
*
* @param conf
* conf to use
@@ -208,9 +209,14 @@ public static void login(final Configuration conf,
public static void login(final Configuration conf,
final String keytabFileKey, final String userNameKey, String hostname)
throws IOException {
- String keytabFilename = conf.get(keytabFileKey);
- if (keytabFilename == null)
+
+ if(! UserGroupInformation.isSecurityEnabled())
return;
+
+ String keytabFilename = conf.get(keytabFileKey);
+ if (keytabFilename == null || keytabFilename.length() == 0) {
+ throw new IOException("Running in secure mode, but config doesn't have a keytab");
+ }
String principalConfig = conf.get(userNameKey, System
.getProperty("user.name"));
diff --git a/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java b/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java
index 14ec74372d091..d5a3a25f90972 100644
--- a/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java
+++ b/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java
@@ -16,12 +16,15 @@
*/
package org.apache.hadoop.security;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import java.io.IOException;
import javax.security.auth.kerberos.KerberosPrincipal;
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Assert;
import org.junit.Test;
public class TestSecurityUtil {
@@ -70,4 +73,23 @@ public void testGetServerPrincipal() throws IOException {
verify(shouldNotReplace, hostname, shouldNotReplace);
verify(shouldNotReplace, shouldNotReplace, shouldNotReplace);
}
+
+ @Test
+ public void testStartsWithIncorrectSettings() throws IOException {
+ Configuration conf = new Configuration();
+ conf.set(
+ org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
+ "kerberos");
+ String keyTabKey="key";
+ conf.set(keyTabKey, "");
+ UserGroupInformation.setConfiguration(conf);
+ boolean gotException = false;
+ try {
+ SecurityUtil.login(conf, keyTabKey, "", "");
+ } catch (IOException e) {
+ // expected
+ gotException=true;
+ }
+ assertTrue("Exception for empty keytabfile name was expected", gotException);
+ }
}
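
A minimal stand-alone sketch of the fail-fast pattern applied in the patch above: in simple mode skip the login entirely, and in secure mode treat a missing keytab entry as a configuration error instead of silently returning. This is an illustration, not the actual SecurityUtil code; the key names passed in are placeholders.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public final class FailFastLogin {
  // Sketch only: keytabKey/principalKey are illustrative config key names.
  public static void login(Configuration conf, String keytabKey, String principalKey)
      throws IOException {
    if (!UserGroupInformation.isSecurityEnabled()) {
      return; // "simple" auth: nothing to do
    }
    String keytab = conf.get(keytabKey);
    if (keytab == null || keytab.isEmpty()) {
      throw new IOException("Running in secure mode, but config doesn't have a keytab");
    }
    // Assumes the principal is present in the config; real code would validate it too.
    UserGroupInformation.loginUserFromKeytab(conf.get(principalKey), keytab);
  }
}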
|
821b8e890f81782bd070a6280c5255d8352d4a56
|
intellij-community
|
optimization: determine token end as next token start in PsiBuilder--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/lang-impl/src/com/intellij/lang/impl/PsiBuilderImpl.java b/lang-impl/src/com/intellij/lang/impl/PsiBuilderImpl.java
index c6da2634f8a0b..7160f9c7b99a1 100644
--- a/lang-impl/src/com/intellij/lang/impl/PsiBuilderImpl.java
+++ b/lang-impl/src/com/intellij/lang/impl/PsiBuilderImpl.java
@@ -52,7 +52,6 @@ public class PsiBuilderImpl extends UserDataHolderBase implements PsiBuilder {
private static final Logger LOG = Logger.getInstance("#com.intellij.lang.impl.PsiBuilderImpl");
private int[] myLexStarts;
- private int[] myLexEnds;
private IElementType[] myLexTypes;
private final MyList myProduction = new MyList();
@@ -141,7 +140,6 @@ private void cacheLexems() {
int approxLexCount = Math.max(10, myText.length() / 5);
myLexStarts = new int[approxLexCount];
- myLexEnds = new int[approxLexCount];
myLexTypes = new IElementType[approxLexCount];
int i = 0;
@@ -151,16 +149,17 @@ private void cacheLexems() {
IElementType type = myLexer.getTokenType();
if (type == null) break;
- if (i >= myLexStarts.length) {
+ if (i >= myLexTypes.length - 1) {
resizeLexems(i * 3 / 2);
}
myLexStarts[i] = myLexer.getTokenStart();
- myLexEnds[i] = myLexer.getTokenEnd();
myLexTypes[i] = type;
i++;
myLexer.advance();
}
+ myLexStarts[i] = myText.length();
+
myLexemCount = i;
}
@@ -445,7 +444,7 @@ public IElementType getTokenType() {
if (myRemapper != null) {
IElementType type = myLexTypes[myCurrentLexem];
- type = myRemapper.filter(type, myLexStarts[myCurrentLexem], myLexEnds[myCurrentLexem], myLexer.getBufferSequence());
+ type = myRemapper.filter(type, myLexStarts[myCurrentLexem], myLexStarts[myCurrentLexem + 1], myLexer.getBufferSequence());
myLexTypes[myCurrentLexem] = type; // filter may have changed the type
return type;
}
@@ -480,19 +479,15 @@ public String getTokenText() {
if (type instanceof TokenWrapper) {
return ((TokenWrapper)type).getValue();
}
- return myText.subSequence(myLexStarts[myCurrentLexem], myLexEnds[myCurrentLexem]).toString();
+ return myText.subSequence(myLexStarts[myCurrentLexem], myLexStarts[myCurrentLexem + 1]).toString();
}
private void resizeLexems(final int newSize) {
- int count = Math.min(newSize, myLexStarts.length);
- int[] newStarts = new int[newSize];
+ int count = Math.min(newSize, myLexTypes.length);
+ int[] newStarts = new int[newSize + 1];
System.arraycopy(myLexStarts, 0, newStarts, 0, count);
myLexStarts = newStarts;
- int[] newEnds = new int[newSize];
- System.arraycopy(myLexEnds, 0, newEnds, 0, count);
- myLexEnds = newEnds;
-
IElementType[] newTypes = new IElementType[newSize];
System.arraycopy(myLexTypes, 0, newTypes, 0, count);
myLexTypes = newTypes;
@@ -797,12 +792,12 @@ else if (item instanceof ErrorItem) {
LOG.error("Not all of the tokens inserted to the tree, parsed text:\n" + myText);
}
- if (myLexStarts.length <= myCurrentLexem) {
+ if (myLexStarts.length <= myCurrentLexem + 1) {
resizeLexems(myCurrentLexem + 1);
}
myLexStarts[myCurrentLexem] = myLexer.getTokenStart(); // $ terminating token.;
- myLexEnds[myCurrentLexem] = 0;
+ myLexStarts[myCurrentLexem + 1] = 0;
myLexTypes[myCurrentLexem] = null;
LOG.assertTrue(curNode == rootMarker, UNBALANCED_MESSAGE);
@@ -844,7 +839,7 @@ private int insertLeafs(int curToken, int lastIdx, final CompositeElement curNod
lastIdx = Math.min(lastIdx, myLexemCount);
while (curToken < lastIdx) {
final int start = myLexStarts[curToken];
- final int end = myLexEnds[curToken];
+ final int end = myLexStarts[curToken + 1];
if (start < end || myLexTypes[curToken] instanceof ILeafElementType) { // Empty token. Most probably a parser directive like indent/dedent in phyton
final IElementType type = myLexTypes[curToken];
final TreeElement leaf = createLeaf(type, start, end);
@@ -1022,7 +1017,7 @@ private int insertLeafs(int curToken, int lastIdx, Ref<LighterASTNode[]> into) {
lastIdx = Math.min(lastIdx, myLexemCount);
while (curToken < lastIdx) {
final int start = myLexStarts[curToken];
- final int end = myLexEnds[curToken];
+ final int end = myLexStarts[curToken + 1];
final IElementType type = myLexTypes[curToken];
if (start < end || type instanceof ILeafElementType) { // Empty token. Most probably a parser directive like indent/dedent in phyton
Token lexem = myPool.alloc();
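
The optimization above drops the per-token end array: because lexemes are contiguous, the end offset of token i is simply the start offset of token i+1, as long as the starts array carries one extra sentinel entry equal to the text length. A minimal stand-alone sketch of the same idea; the class and method names are illustrative, not the PsiBuilder API.

// Stores only token start offsets plus a trailing sentinel equal to the text length,
// so tokenEnd(i) == tokenStart(i + 1) and no separate "ends" array is needed.
final class TokenOffsets {
  private final int[] starts; // length = tokenCount + 1 (last entry is the sentinel)

  TokenOffsets(int[] startsWithSentinel) {
    this.starts = startsWithSentinel;
  }

  int tokenStart(int i) { return starts[i]; }

  int tokenEnd(int i) { return starts[i + 1]; }

  CharSequence tokenText(CharSequence text, int i) {
    return text.subSequence(tokenStart(i), tokenEnd(i));
  }

  public static void main(String[] args) {
    String text = "foo bar";
    // tokens: "foo" [0,3), " " [3,4), "bar" [4,7); sentinel = text.length() = 7
    TokenOffsets t = new TokenOffsets(new int[] {0, 3, 4, 7});
    System.out.println(t.tokenText(text, 0)); // foo
    System.out.println(t.tokenText(text, 2)); // bar
  }
}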
|
8c3a7e4a7dc3a575720b6e812fc91b38cdd211c5
|
hbase
|
HBASE-8703 [WINDOWS] Timed-out processes exit with non-zero code causing HealthChecker to report incorrectly. ADDENDUM patch to fix flaky test--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1499047 13f79535-47bb-0310-9956-ffa450edef68-
|
c
|
https://github.com/apache/hbase
|
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java
index 9f36e7e5d50f..17036bec5d18 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java
@@ -25,6 +25,7 @@
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
+import java.util.UUID;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -55,32 +56,38 @@ public void cleanUp() throws IOException {
}
@Test
- public void testHealthChecker() throws Exception {
+ public void testHealthCheckerSuccess() throws Exception {
+ String normalScript = "echo \"I am all fine\"";
+ healthCheckerTest(normalScript, HealthCheckerExitStatus.SUCCESS);
+ }
+
+ @Test
+ public void testHealthCheckerFail() throws Exception {
+ String errorScript = "echo ERROR" + eol + "echo \"Node not healthy\"";
+ healthCheckerTest(errorScript, HealthCheckerExitStatus.FAILED);
+ }
+
+ @Test
+ public void testHealthCheckerTimeout() throws Exception {
+ String timeOutScript = "sleep 4" + eol + "echo \"I am fine\"";
+ healthCheckerTest(timeOutScript, HealthCheckerExitStatus.TIMED_OUT);
+ }
+
+ public void healthCheckerTest(String script, HealthCheckerExitStatus expectedStatus)
+ throws Exception {
Configuration config = getConfForNodeHealthScript();
config.addResource(healthScriptFile.getName());
String location = healthScriptFile.getAbsolutePath();
long timeout = config.getLong(HConstants.HEALTH_SCRIPT_TIMEOUT, 2000);
+
HealthChecker checker = new HealthChecker();
checker.init(location, timeout);
- String normalScript = "echo \"I am all fine\"";
- createScript(normalScript, true);
+ createScript(script, true);
HealthReport report = checker.checkHealth();
+ assertEquals(expectedStatus, report.getStatus());
LOG.info("Health Status:" + report.getHealthReport());
- assertEquals(HealthCheckerExitStatus.SUCCESS, report.getStatus());
-
- String errorScript = "echo ERROR" + eol + "echo \"Server not healthy\"";
- createScript(errorScript, true);
- report = checker.checkHealth();
- LOG.info("Health Status:" + report.getHealthReport());
- assertEquals(HealthCheckerExitStatus.FAILED, report.getStatus());
-
- String timeOutScript = "sleep 4" + eol + "echo \"I am fine\"";
- createScript(timeOutScript, true);
- report = checker.checkHealth();
- LOG.info("Health Status:" + report.getHealthReport());
- assertEquals(HealthCheckerExitStatus.TIMED_OUT, report.getStatus());
this.healthScriptFile.delete();
}
@@ -130,7 +137,8 @@ private Configuration getConfForNodeHealthScript() throws IOException {
throw new IOException("Failed mkdirs " + tempDir);
}
}
- String scriptName = Shell.WINDOWS ? "HealthScript.cmd" : "HealthScript.sh";
+ String scriptName = "HealthScript" + UUID.randomUUID().toString()
+ + (Shell.WINDOWS ? ".cmd" : ".sh");
healthScriptFile = new File(tempDir.getAbsolutePath(), scriptName);
conf.set(HConstants.HEALTH_SCRIPT_LOC, healthScriptFile.getAbsolutePath());
conf.setLong(HConstants.HEALTH_FAILURE_THRESHOLD, 3);
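
Beyond splitting the single test into three focused cases, the de-flaking trick above is to give every test run its own health-script file name so re-runs and parallel tests never race on the same file. A small sketch of that naming scheme, with an illustrative directory and class name rather than the HBase test code:

import java.io.File;
import java.util.UUID;

final class UniqueScriptName {
  // Sketch only: builds a per-run script file name, choosing the extension by platform.
  static File newScriptFile(File dir, boolean windows) {
    String name = "HealthScript" + UUID.randomUUID() + (windows ? ".cmd" : ".sh");
    return new File(dir, name);
  }

  public static void main(String[] args) {
    System.out.println(newScriptFile(new File("/tmp"), false));
  }
}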
|
d4ffca41d7ef1d04ab3d4a1580817adace72a5da
|
hadoop
|
YARN-3108. ApplicationHistoryServer doesn't process -D arguments (Chang Li via jeagles)--(cherry picked from commit 30a8778c632c0f57cdd005080a470065a60756a8)-
|
c
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ed6642cfa97c0..37d9a1cbbb510 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -185,6 +185,9 @@ Release 2.7.0 - UNRELEASED
scheduler web UI and queue initialize/refresh logging.
(Eric Payne via wangda)
+ YARN-3108. ApplicationHistoryServer doesn't process -D arguments (Chang Li
+ via jeagles)
+
OPTIMIZATIONS
BUG FIXES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index 352460558c380..0bafd3695eedd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -33,6 +33,7 @@
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.service.Service;
import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.ShutdownHookManager;
import org.apache.hadoop.util.StringUtils;
@@ -153,6 +154,7 @@ static ApplicationHistoryServer launchAppHistoryServer(String[] args) {
new CompositeServiceShutdownHook(appHistoryServer),
SHUTDOWN_HOOK_PRIORITY);
YarnConfiguration conf = new YarnConfiguration();
+ new GenericOptionsParser(conf, args);
appHistoryServer.init(conf);
appHistoryServer.start();
} catch (Throwable t) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
index cf85ff7dd426d..a7e7daa1c58ad 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
@@ -106,6 +106,32 @@ public void testLaunch() throws Exception {
}
}
+ //test launch method with -D arguments
+ @Test(timeout = 60000)
+ public void testLaunchWithArguments() throws Exception {
+ ExitUtil.disableSystemExit();
+ ApplicationHistoryServer historyServer = null;
+ try {
+ // Not able to modify the config of this test case,
+ // but others have been customized to avoid conflicts
+ String[] args = new String[2];
+ args[0]="-D" + YarnConfiguration.TIMELINE_SERVICE_LEVELDB_TTL_INTERVAL_MS + "=4000";
+ args[1]="-D" + YarnConfiguration.TIMELINE_SERVICE_TTL_MS + "=200";
+ historyServer =
+ ApplicationHistoryServer.launchAppHistoryServer(args);
+ Configuration conf = historyServer.getConfig();
+ assertEquals("4000", conf.get(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_TTL_INTERVAL_MS));
+ assertEquals("200", conf.get(YarnConfiguration.TIMELINE_SERVICE_TTL_MS));
+ } catch (ExitUtil.ExitException e) {
+ assertEquals(0, e.status);
+ ExitUtil.resetFirstExitException();
+ fail();
+ } finally {
+ if (historyServer != null) {
+ historyServer.stop();
+ }
+ }
+ }
@Test(timeout = 240000)
public void testFilterOverrides() throws Exception {
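
A minimal sketch of the fix above: feeding the command line through GenericOptionsParser copies any -Dkey=value arguments into the Configuration before the service is initialized, which is what the new test asserts. The property name printed here is a placeholder, not a real YARN key.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public final class ParseDashDArgs {
  public static void main(String[] args) throws Exception {
    Configuration conf = new YarnConfiguration();
    // Side effect: applies -D overrides from args to conf before anything reads it.
    new GenericOptionsParser(conf, args);
    System.out.println(conf.get("some.example.property")); // illustrative key
  }
}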
|
33660bc78e9b8d0cef71d635bf609c6dcb6ddeea
|
intellij-community
|
Fixed typos in test.--
|
p
|
https://github.com/JetBrains/intellij-community
|
diff --git a/python/testSrc/com/jetbrains/python/PyPropertyTestSuite.java b/python/testSrc/com/jetbrains/python/PyPropertyTestSuite.java
index 6a6512d0e4eae..9902b4b6d20f4 100644
--- a/python/testSrc/com/jetbrains/python/PyPropertyTestSuite.java
+++ b/python/testSrc/com/jetbrains/python/PyPropertyTestSuite.java
@@ -27,10 +27,12 @@ abstract static class PyPropertyTest extends PyResolveTestCase {
protected PyClass myClass;
protected LanguageLevel myLanguageLevel = LanguageLevel.PYTHON26;
+ abstract String getFileName();
+
@Override
protected void setUp() throws Exception {
super.setUp();
- PsiReference ref = configureByFile("property/"+ "Classic.py");
+ PsiReference ref = configureByFile("property/"+ getFileName());
final Project project = ref.getElement().getContainingFile().getProject();
project.putUserData(PyBuiltinCache.TEST_SDK, PythonMockSdk.findOrCreate());
PythonLanguageLevelPusher.setForcedLanguageLevel(project, myLanguageLevel);
@@ -49,6 +51,11 @@ public PyClassicPropertyTest() {
super();
}
+ @Override
+ String getFileName() {
+ return "Classic.py";
+ }
+
public void testV1() throws Exception {
Property p;
Maybe<PyFunction> accessor;
@@ -148,13 +155,23 @@ public void testV4() throws Exception {
public static class PyDecoratedPropertyTest extends PyPropertyTest {
public PyDecoratedPropertyTest() {
super();
+ }
+
+ @Override
+ String getFileName() {
+ return "Decorated.py";
+ }
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
myLanguageLevel = LanguageLevel.PYTHON26;
}
public void testW1() throws Exception {
Property p;
Maybe<PyFunction> accessor;
- p = myClass.findProperty("W1");
+ p = myClass.findProperty("w1");
assertNotNull(p);
assertNull(p.getDoc());
assertNull(p.getDefinitionSite());
@@ -178,7 +195,7 @@ public void testW1() throws Exception {
public void testW2() throws Exception {
Property p;
Maybe<PyFunction> accessor;
- p = myClass.findProperty("W2");
+ p = myClass.findProperty("w2");
assertNotNull(p);
assertNull(p.getDoc());
assertNull(p.getDefinitionSite());
|
b4dbb666fc187328f42ae63f5d173a57000dff47
|
kotlin
|
More usages of asKtScope() removed--
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/resolve/DescriptorResolver.java b/compiler/frontend/src/org/jetbrains/kotlin/resolve/DescriptorResolver.java
index bf6d92b82b142..bb29ec812d2e7 100644
--- a/compiler/frontend/src/org/jetbrains/kotlin/resolve/DescriptorResolver.java
+++ b/compiler/frontend/src/org/jetbrains/kotlin/resolve/DescriptorResolver.java
@@ -556,7 +556,7 @@ public void checkNamesInConstraints(
Name name = nameExpression.getReferencedNameAsName();
- ClassifierDescriptor classifier = ScopeUtilsKt.asKtScope(scope).getClassifier(name, NoLookupLocation.UNSORTED);
+ ClassifierDescriptor classifier = ScopeUtilsKt.getClassifier(scope, name, NoLookupLocation.UNSORTED);
if (classifier instanceof TypeParameterDescriptor && classifier.getContainingDeclaration() == descriptor) continue;
if (classifier != null) {
diff --git a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/kdoc/KDocReference.kt b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/kdoc/KDocReference.kt
index bf676bb1ec93a..90b6616c19f0d 100644
--- a/idea/idea-analysis/src/org/jetbrains/kotlin/idea/kdoc/KDocReference.kt
+++ b/idea/idea-analysis/src/org/jetbrains/kotlin/idea/kdoc/KDocReference.kt
@@ -32,7 +32,7 @@ import org.jetbrains.kotlin.psi.psiUtil.getStrictParentOfType
import org.jetbrains.kotlin.resolve.BindingContext
import org.jetbrains.kotlin.resolve.FunctionDescriptorUtil
import org.jetbrains.kotlin.resolve.scopes.*
-import org.jetbrains.kotlin.resolve.scopes.utils.asKtScope
+import org.jetbrains.kotlin.resolve.scopes.utils.getDescriptorsFromAllFiltered
import org.jetbrains.kotlin.resolve.scopes.utils.memberScopeAsImportingScope
import org.jetbrains.kotlin.resolve.source.PsiSourceElement
@@ -85,9 +85,8 @@ public fun resolveKDocLink(resolutionFacade: ResolutionFacade,
var result: Collection<DeclarationDescriptor> = listOf(fromDescriptor)
qualifiedName.forEach { nameComponent ->
- if (result.size() != 1) return listOf()
- val scope = getResolutionScope(resolutionFacade, result.first()).asKtScope()
- result = scope.getDescriptors().filter { it.getName().asString() == nameComponent }
+ val scope = getResolutionScope(resolutionFacade, result.singleOrNull() ?: return emptyList())
+ result = scope.getDescriptorsFromAllFiltered(nameFilter = { it.asString() == nameComponent})
}
return result
@@ -96,9 +95,9 @@ public fun resolveKDocLink(resolutionFacade: ResolutionFacade,
private fun resolveInLocalScope(fromDescriptor: DeclarationDescriptor,
name: String,
resolutionFacade: ResolutionFacade): List<DeclarationDescriptor> {
- val scope = getResolutionScope(resolutionFacade, fromDescriptor).asKtScope()
- return scope.getDescriptors().filter {
- it.getName().asString() == name && it.getContainingDeclaration() == fromDescriptor
+ val scope = getResolutionScope(resolutionFacade, fromDescriptor)
+ return scope.getDescriptorsFromAllFiltered(nameFilter = { it.asString() == name }).filter {
+ it.containingDeclaration == fromDescriptor
}
}
|
797495a179c6fdee40b1be4d1c27693040f3f320
|
drools
|
BZ-1025874: fixing incremental update of kjars--
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-compiler/src/main/java/org/drools/compiler/compiler/CompositeKnowledgeBuilderImpl.java b/drools-compiler/src/main/java/org/drools/compiler/compiler/CompositeKnowledgeBuilderImpl.java
index 50ceaf3dc32..c15a0743d6a 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/compiler/CompositeKnowledgeBuilderImpl.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/compiler/CompositeKnowledgeBuilderImpl.java
@@ -1,20 +1,24 @@
package org.drools.compiler.compiler;
-import org.drools.core.builder.conf.impl.JaxbConfigurationImpl;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.drools.compiler.compiler.PackageBuilder.TypeDefinition;
import org.drools.compiler.lang.descr.CompositePackageDescr;
import org.drools.compiler.lang.descr.ImportDescr;
import org.drools.compiler.lang.descr.PackageDescr;
import org.drools.compiler.lang.descr.TypeDeclarationDescr;
-import org.kie.internal.builder.CompositeKnowledgeBuilder;
+import org.drools.core.builder.conf.impl.JaxbConfigurationImpl;
import org.kie.api.io.Resource;
import org.kie.api.io.ResourceConfiguration;
import org.kie.api.io.ResourceType;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import org.kie.internal.builder.ChangeType;
+import org.kie.internal.builder.CompositeKnowledgeBuilder;
+import org.kie.internal.builder.ResourceChange;
+import org.kie.internal.builder.ResourceChangeSet;
public class CompositeKnowledgeBuilderImpl implements CompositeKnowledgeBuilder {
@@ -46,8 +50,16 @@ public CompositeKnowledgeBuilder add(Resource resource, ResourceType type) {
return add(resource, type, resource.getConfiguration());
}
+ public CompositeKnowledgeBuilder add(Resource resource, ResourceType type, ResourceChangeSet changes) {
+ return add(resource, type, resource.getConfiguration(), changes);
+ }
+
public CompositeKnowledgeBuilder add(Resource resource, ResourceType type, ResourceConfiguration configuration) {
- ResourceDescr resourceDescr = new ResourceDescr(configuration, resource);
+ return add(resource, type, configuration, null);
+ }
+
+ public CompositeKnowledgeBuilder add(Resource resource, ResourceType type, ResourceConfiguration configuration, ResourceChangeSet changes) {
+ ResourceDescr resourceDescr = new ResourceDescr(configuration, resource, changes);
List<ResourceDescr> resourceDescrs = this.resourcesByType.get(type);
if (resourceDescrs == null) {
resourceDescrs = new ArrayList<ResourceDescr>();
@@ -91,7 +103,9 @@ private void registerDSL() {
if (resourcesByType != null) {
for (ResourceDescr resourceDescr : resourcesByType) {
try {
+ pkgBuilder.setAssetFilter(resourceDescr.getFilter());
pkgBuilder.addDsl(resourceDescr.resource);
+ pkgBuilder.setAssetFilter(null);
} catch (RuntimeException e) {
if (buildException == null) {
buildException = e;
@@ -110,6 +124,7 @@ private void buildResources() {
if (resourcesByType != null) {
for (ResourceDescr resourceDescr : resourcesByType) {
try {
+ pkgBuilder.setAssetFilter(resourceDescr.getFilter());
pkgBuilder.addProcessFromXml(resourceDescr.resource);
} catch (RuntimeException e) {
if (buildException == null) {
@@ -119,6 +134,8 @@ private void buildResources() {
if (buildException == null) {
buildException = new RuntimeException( e );
}
+ } finally{
+ pkgBuilder.setAssetFilter(null);
}
}
}
@@ -128,6 +145,7 @@ private void buildResources() {
for (ResourceDescr resourceDescr : resourcesByType) {
try {
BPMN2ProcessFactory.configurePackageBuilder( pkgBuilder );
+ pkgBuilder.setAssetFilter(resourceDescr.getFilter());
pkgBuilder.addProcessFromXml(resourceDescr.resource);
} catch (RuntimeException e) {
if (buildException == null) {
@@ -137,6 +155,8 @@ private void buildResources() {
if (buildException == null) {
buildException = new RuntimeException( e );
}
+ } finally{
+ pkgBuilder.setAssetFilter(null);
}
}
}
@@ -145,6 +165,7 @@ private void buildResources() {
if (resourcesByType != null) {
for (ResourceDescr resourceDescr : resourcesByType) {
try {
+ pkgBuilder.setAssetFilter(resourceDescr.getFilter());
pkgBuilder.addPackageFromInputStream(resourceDescr.resource);
} catch (RuntimeException e) {
if (buildException == null) {
@@ -154,6 +175,8 @@ private void buildResources() {
if (buildException == null) {
buildException = new RuntimeException( e );
}
+ } finally{
+ pkgBuilder.setAssetFilter(null);
}
}
}
@@ -162,6 +185,7 @@ private void buildResources() {
if (resourcesByType != null) {
for (ResourceDescr resourceDescr : resourcesByType) {
try {
+ pkgBuilder.setAssetFilter(resourceDescr.getFilter());
pkgBuilder.addPackageFromChangeSet(resourceDescr.resource);
} catch (RuntimeException e) {
if (buildException == null) {
@@ -171,6 +195,8 @@ private void buildResources() {
if (buildException == null) {
buildException = new RuntimeException( e );
}
+ } finally{
+ pkgBuilder.setAssetFilter(null);
}
}
}
@@ -179,6 +205,7 @@ private void buildResources() {
if (resourcesByType != null) {
for (ResourceDescr resourceDescr : resourcesByType) {
try {
+ pkgBuilder.setAssetFilter(resourceDescr.getFilter());
pkgBuilder.addPackageFromXSD(resourceDescr.resource, (JaxbConfigurationImpl) resourceDescr.configuration);
} catch (RuntimeException e) {
if (buildException == null) {
@@ -188,6 +215,8 @@ private void buildResources() {
if (buildException == null) {
buildException = new RuntimeException( e );
}
+ } finally{
+ pkgBuilder.setAssetFilter(null);
}
}
}
@@ -196,6 +225,7 @@ private void buildResources() {
if (resourcesByType != null) {
for (ResourceDescr resourceDescr : resourcesByType) {
try {
+ pkgBuilder.setAssetFilter(resourceDescr.getFilter());
pkgBuilder.addPackageFromPMML(resourceDescr.resource, ResourceType.PMML, resourceDescr.configuration);
} catch (RuntimeException e) {
if (buildException == null) {
@@ -205,6 +235,8 @@ private void buildResources() {
if (buildException == null) {
buildException = new RuntimeException( e );
}
+ } finally{
+ pkgBuilder.setAssetFilter(null);
}
}
}
@@ -214,7 +246,9 @@ private void buildOthers() {
try {
for (Map.Entry<ResourceType, List<ResourceDescr>> entry : resourcesByType.entrySet()) {
for (ResourceDescr resourceDescr : entry.getValue()) {
+ pkgBuilder.setAssetFilter(resourceDescr.getFilter());
pkgBuilder.addPackageForExternalType(resourceDescr.resource, entry.getKey(), resourceDescr.configuration);
+ pkgBuilder.setAssetFilter(null);
}
}
} catch (RuntimeException e) {
@@ -225,15 +259,17 @@ private void buildOthers() {
}
private void buildRules(Collection<CompositePackageDescr> packages) {
- for (PackageDescr packageDescr : packages) {
+ for (CompositePackageDescr packageDescr : packages) {
+ pkgBuilder.setAssetFilter(packageDescr.getFilter());
PackageRegistry pkgRegistry = pkgBuilder.getPackageRegistry(packageDescr.getNamespace());
pkgBuilder.processOtherDeclarations(pkgRegistry, packageDescr);
pkgBuilder.compileAllRules(packageDescr, pkgRegistry);
+ pkgBuilder.setAssetFilter(null);
}
}
private void buildTypeDeclarations(Collection<CompositePackageDescr> packages) {
- for (PackageDescr packageDescr : packages) {
+ for (CompositePackageDescr packageDescr : packages) {
for (TypeDeclarationDescr typeDeclarationDescr : packageDescr.getTypeDeclarations()) {
if (pkgBuilder.isEmpty( typeDeclarationDescr.getNamespace() )) {
typeDeclarationDescr.setNamespace( packageDescr.getNamespace() ); // set the default namespace
@@ -243,7 +279,7 @@ private void buildTypeDeclarations(Collection<CompositePackageDescr> packages) {
}
List<PackageBuilder.TypeDefinition> unresolvedTypes = new ArrayList<PackageBuilder.TypeDefinition>();
- for (PackageDescr packageDescr : packages) {
+ for (CompositePackageDescr packageDescr : packages) {
buildTypeDeclarations(packageDescr, unresolvedTypes);
}
@@ -251,21 +287,24 @@ private void buildTypeDeclarations(Collection<CompositePackageDescr> packages) {
pkgBuilder.processUnresolvedType(pkgBuilder.getPackageRegistry(unresolvedType.getNamespace()), unresolvedType);
}
- for (PackageDescr packageDescr : packages) {
+ for (CompositePackageDescr packageDescr : packages) {
for (ImportDescr importDescr : packageDescr.getImports()) {
pkgBuilder.getPackageRegistry(packageDescr.getNamespace()).addImport( importDescr );
}
}
}
- private List<PackageBuilder.TypeDefinition> buildTypeDeclarations(PackageDescr packageDescr, List<PackageBuilder.TypeDefinition> unresolvedTypes) {
+ private List<PackageBuilder.TypeDefinition> buildTypeDeclarations(CompositePackageDescr packageDescr, List<PackageBuilder.TypeDefinition> unresolvedTypes) {
+ pkgBuilder.setAssetFilter(packageDescr.getFilter());
PackageRegistry pkgRegistry = pkgBuilder.initPackageRegistry(packageDescr);
if (pkgRegistry == null) {
return null;
}
pkgBuilder.processEntryPointDeclarations(pkgRegistry, packageDescr);
- return pkgBuilder.processTypeDeclarations(pkgRegistry, packageDescr, unresolvedTypes);
+ List<TypeDefinition> processTypeDeclarations = pkgBuilder.processTypeDeclarations(pkgRegistry, packageDescr, unresolvedTypes);
+ pkgBuilder.setAssetFilter(null);
+ return processTypeDeclarations;
}
private Collection<CompositePackageDescr> buildPackageDescr() {
@@ -287,7 +326,7 @@ private void buildResource(Map<String, CompositePackageDescr> packages, Resource
if (resourcesByType != null) {
for (ResourceDescr resourceDescr : resourcesByType) {
try {
- registerPackageDescr(packages, resourceDescr.resource, mapper.map(pkgBuilder, resourceDescr));
+ registerPackageDescr(resourceDescr, packages, resourceDescr.resource, mapper.map(pkgBuilder, resourceDescr));
} catch (RuntimeException e) {
if (buildException == null) {
buildException = e;
@@ -301,24 +340,58 @@ private void buildResource(Map<String, CompositePackageDescr> packages, Resource
}
}
- private void registerPackageDescr(Map<String, CompositePackageDescr> packages, Resource resource, PackageDescr packageDescr) {
+ private void registerPackageDescr(ResourceDescr resourceDescr, Map<String, CompositePackageDescr> packages, Resource resource, PackageDescr packageDescr) {
if (packageDescr != null) {
CompositePackageDescr compositePackageDescr = packages.get(packageDescr.getNamespace());
if (compositePackageDescr == null) {
- packages.put(packageDescr.getNamespace(), new CompositePackageDescr(resource, packageDescr));
+ compositePackageDescr = new CompositePackageDescr(resource, packageDescr);
+ packages.put(packageDescr.getNamespace(), compositePackageDescr);
} else {
compositePackageDescr.addPackageDescr(resource, packageDescr);
}
+ compositePackageDescr.addFilter( resourceDescr.getFilter() );
}
}
private static class ResourceDescr {
final Resource resource;
final ResourceConfiguration configuration;
+ final ResourceChangeSet changes;
+ final Map<String, ResourceChange> changeMap;
- private ResourceDescr(ResourceConfiguration configuration, Resource resource) {
+ private ResourceDescr(ResourceConfiguration configuration, Resource resource, ResourceChangeSet changes) {
this.configuration = configuration;
this.resource = resource;
+ this.changes = changes;
+ if( changes != null ) {
+ changeMap = new HashMap<String, ResourceChange>();
+ for( ResourceChange c : changes.getChanges() ) {
+ changeMap.put(c.getName(), c);
+ }
+ } else {
+ changeMap = null;
+ }
+ }
+
+ public PackageBuilder.AssetFilter getFilter() {
+ return changeMap == null ? null : this.new ChangeSetAssetFilter();
+ }
+
+ private class ChangeSetAssetFilter implements PackageBuilder.AssetFilter {
+ @Override
+ public Action accept(String pkgName, String assetName) {
+ ResourceChange change = changeMap.get(assetName);
+ if( change == null ) {
+ return Action.DO_NOTHING;
+ } else if( change.getChangeType().equals(ChangeType.ADDED) ) {
+ return Action.ADD;
+ } else if( change.getChangeType().equals(ChangeType.REMOVED) ) {
+ return Action.REMOVE;
+ } else if( change.getChangeType().equals(ChangeType.UPDATED) ) {
+ return Action.UPDATE;
+ }
+ return Action.DO_NOTHING;
+ }
}
}
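
A stand-alone sketch of the incremental-build idea in the class above: a per-resource filter maps each asset name to the action the builder should take, and anything not mentioned in the change set is left untouched. The enum and class names here are illustrative, not the Drools AssetFilter/ResourceChangeSet API.

import java.util.HashMap;
import java.util.Map;

final class ChangeSetFilter {
  enum Change { ADDED, REMOVED, UPDATED }
  enum Action { DO_NOTHING, ADD, REMOVE, UPDATE }

  private final Map<String, Change> changes = new HashMap<>();

  void record(String assetName, Change change) {
    changes.put(assetName, change);
  }

  Action accept(String assetName) {
    Change c = changes.get(assetName);
    if (c == null) return Action.DO_NOTHING; // assets not in the change set are skipped
    switch (c) {
      case ADDED:   return Action.ADD;
      case REMOVED: return Action.REMOVE;
      case UPDATED: return Action.UPDATE;
      default:      return Action.DO_NOTHING;
    }
  }

  public static void main(String[] args) {
    ChangeSetFilter f = new ChangeSetFilter();
    f.record("RuleA", Change.UPDATED);
    System.out.println(f.accept("RuleA")); // UPDATE
    System.out.println(f.accept("RuleB")); // DO_NOTHING
  }
}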
diff --git a/drools-compiler/src/main/java/org/drools/compiler/compiler/PackageBuilder.java b/drools-compiler/src/main/java/org/drools/compiler/compiler/PackageBuilder.java
index 7b69a2f38eb..e2bbef06fba 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/compiler/PackageBuilder.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/compiler/PackageBuilder.java
@@ -47,6 +47,7 @@
import java.util.Stack;
import org.drools.compiler.commons.jci.problems.CompilationProblem;
+import org.drools.compiler.compiler.PackageBuilder.AssetFilter.Action;
import org.drools.compiler.compiler.xml.XmlPackageReader;
import org.drools.compiler.lang.ExpanderException;
import org.drools.compiler.lang.descr.AbstractClassTypeDeclarationDescr;
@@ -198,7 +199,7 @@ public class PackageBuilder
private ClassLoader rootClassLoader;
- private final Map<String, Class< ? >> globals;
+ private final Map<String, Class<?>> globals;
private Resource resource;
@@ -233,12 +234,14 @@ public class PackageBuilder
private int currentRulePackage = 0;
+ private AssetFilter assetFilter = null;
+
/**
* Use this when package is starting from scratch.
*/
public PackageBuilder() {
- this( (RuleBase) null,
- null );
+ this((RuleBase) null,
+ null);
}
/**
@@ -246,13 +249,13 @@ public PackageBuilder() {
*/
public PackageBuilder(final Package pkg) {
- this( pkg,
- null );
+ this(pkg,
+ null);
}
public PackageBuilder(final RuleBase ruleBase) {
- this( ruleBase,
- null );
+ this(ruleBase,
+ null);
}
/**
@@ -266,20 +269,20 @@ public PackageBuilder(final RuleBase ruleBase) {
* @param configuration
*/
public PackageBuilder(final PackageBuilderConfiguration configuration) {
- this( (RuleBase) null,
- configuration );
+ this((RuleBase) null,
+ configuration);
}
public PackageBuilder(Package pkg,
- PackageBuilderConfiguration configuration) {
- if ( configuration == null ) {
+ PackageBuilderConfiguration configuration) {
+ if (configuration == null) {
this.configuration = new PackageBuilderConfiguration();
} else {
this.configuration = configuration;
}
this.dateFormats = null;//(DateFormats) this.environment.get( EnvironmentName.DATE_FORMATS );
- if ( this.dateFormats == null ) {
+ if (this.dateFormats == null) {
this.dateFormats = new DateFormatsImpl();
//this.environment.set( EnvironmentName.DATE_FORMATS , this.dateFormats );
}
@@ -291,18 +294,18 @@ public PackageBuilder(Package pkg,
this.pkgRegistryMap = new LinkedHashMap<String, PackageRegistry>();
this.results = new ArrayList<KnowledgeBuilderResult>();
- PackageRegistry pkgRegistry = new PackageRegistry( this,
- pkg );
- pkgRegistry.setDialect( this.defaultDialect );
- this.pkgRegistryMap.put( pkg.getName(),
- pkgRegistry );
+ PackageRegistry pkgRegistry = new PackageRegistry(this,
+ pkg);
+ pkgRegistry.setDialect(this.defaultDialect);
+ this.pkgRegistryMap.put(pkg.getName(),
+ pkgRegistry);
// add imports to pkg registry
- for ( final ImportDeclaration implDecl : pkg.getImports().values() ) {
- pkgRegistry.addImport( new ImportDescr( implDecl.getTarget() ) );
+ for (final ImportDeclaration implDecl : pkg.getImports().values()) {
+ pkgRegistry.addImport(new ImportDescr(implDecl.getTarget()));
}
- globals = new HashMap<String, Class< ? >>();
+ globals = new HashMap<String, Class<?>>();
processBuilder = createProcessBuilder();
@@ -311,21 +314,21 @@ public PackageBuilder(Package pkg,
}
public PackageBuilder(RuleBase ruleBase,
- PackageBuilderConfiguration configuration) {
- if ( configuration == null ) {
+ PackageBuilderConfiguration configuration) {
+ if (configuration == null) {
this.configuration = new PackageBuilderConfiguration();
} else {
this.configuration = configuration;
}
- if ( ruleBase != null ) {
+ if (ruleBase != null) {
this.rootClassLoader = ((InternalRuleBase) ruleBase).getRootClassLoader();
} else {
this.rootClassLoader = this.configuration.getClassLoader();
}
this.dateFormats = null;//(DateFormats) this.environment.get( EnvironmentName.DATE_FORMATS );
- if ( this.dateFormats == null ) {
+ if (this.dateFormats == null) {
this.dateFormats = new DateFormatsImpl();
//this.environment.set( EnvironmentName.DATE_FORMATS , this.dateFormats );
}
@@ -339,7 +342,7 @@ public PackageBuilder(RuleBase ruleBase,
this.ruleBase = (ReteooRuleBase) ruleBase;
- globals = new HashMap<String, Class< ? >>();
+ globals = new HashMap<String, Class<?>>();
processBuilder = createProcessBuilder();
@@ -348,70 +351,70 @@ public PackageBuilder(RuleBase ruleBase,
}
public PackageBuilder deepClone() {
- PackageBuilder clone = new PackageBuilder( configuration );
+ PackageBuilder clone = new PackageBuilder(configuration);
clone.rootClassLoader = rootClassLoader;
- for ( Map.Entry<String, PackageRegistry> entry : pkgRegistryMap.entrySet() ) {
- clone.pkgRegistryMap.put( entry.getKey(), entry.getValue().clonePackage( rootClassLoader ) );
+ for (Map.Entry<String, PackageRegistry> entry : pkgRegistryMap.entrySet()) {
+ clone.pkgRegistryMap.put(entry.getKey(), entry.getValue().clonePackage(rootClassLoader));
}
- clone.results.addAll( results );
- clone.ruleBase = ClassUtils.deepClone( ruleBase, rootClassLoader );
- clone.globals.putAll( globals );
- if ( dslFiles != null ) {
+ clone.results.addAll(results);
+ clone.ruleBase = ClassUtils.deepClone(ruleBase, rootClassLoader);
+ clone.globals.putAll(globals);
+ if (dslFiles != null) {
clone.dslFiles = new ArrayList<DSLTokenizedMappingFile>();
- clone.dslFiles.addAll( dslFiles );
+ clone.dslFiles.addAll(dslFiles);
}
- if ( cacheTypes != null ) {
+ if (cacheTypes != null) {
clone.cacheTypes = new HashMap<String, TypeDeclaration>();
- clone.cacheTypes.putAll( cacheTypes );
+ clone.cacheTypes.putAll(cacheTypes);
}
- clone.packageAttributes.putAll( packageAttributes );
- for ( Map.Entry<String, List<PackageDescr>> entry : packages.entrySet() ) {
- clone.packages.put( entry.getKey(), new ArrayList<PackageDescr>( entry.getValue() ) );
+ clone.packageAttributes.putAll(packageAttributes);
+ for (Map.Entry<String, List<PackageDescr>> entry : packages.entrySet()) {
+ clone.packages.put(entry.getKey(), new ArrayList<PackageDescr>(entry.getValue()));
}
- clone.packages.putAll( packages );
+ clone.packages.putAll(packages);
clone.currentRulePackage = currentRulePackage;
return clone;
}
private void initBuiltinTypeDeclarations() {
- TypeDeclaration colType = new TypeDeclaration( "Collection" );
- colType.setTypesafe( false );
- colType.setTypeClass( Collection.class );
- builtinTypes.put( "java.util.Collection",
- colType );
-
- TypeDeclaration mapType = new TypeDeclaration( "Map" );
- mapType.setTypesafe( false );
- mapType.setTypeClass( Map.class );
- builtinTypes.put( "java.util.Map",
- mapType );
-
- TypeDeclaration activationType = new TypeDeclaration( "Match" );
- activationType.setTypesafe( false );
- activationType.setTypeClass( Match.class );
- builtinTypes.put( Match.class.getCanonicalName(),
- activationType );
-
- TypeDeclaration thingType = new TypeDeclaration( Thing.class.getSimpleName() );
- thingType.setKind( TypeDeclaration.Kind.TRAIT );
- thingType.setTypeClass( Thing.class );
- builtinTypes.put( Thing.class.getCanonicalName(),
- thingType );
+ TypeDeclaration colType = new TypeDeclaration("Collection");
+ colType.setTypesafe(false);
+ colType.setTypeClass(Collection.class);
+ builtinTypes.put("java.util.Collection",
+ colType);
+
+ TypeDeclaration mapType = new TypeDeclaration("Map");
+ mapType.setTypesafe(false);
+ mapType.setTypeClass(Map.class);
+ builtinTypes.put("java.util.Map",
+ mapType);
+
+ TypeDeclaration activationType = new TypeDeclaration("Match");
+ activationType.setTypesafe(false);
+ activationType.setTypeClass(Match.class);
+ builtinTypes.put(Match.class.getCanonicalName(),
+ activationType);
+
+ TypeDeclaration thingType = new TypeDeclaration(Thing.class.getSimpleName());
+ thingType.setKind(TypeDeclaration.Kind.TRAIT);
+ thingType.setTypeClass(Thing.class);
+ builtinTypes.put(Thing.class.getCanonicalName(),
+ thingType);
}
private ProcessBuilder createProcessBuilder() {
try {
- return ProcessBuilderFactory.newProcessBuilder( this );
- } catch ( IllegalArgumentException e ) {
+ return ProcessBuilderFactory.newProcessBuilder(this);
+ } catch (IllegalArgumentException e) {
processBuilderCreationFailure = e;
return null;
}
}
private PMMLCompiler getPMMLCompiler() {
- if ( this.pmmlCompiler == null ) {
+ if (this.pmmlCompiler == null) {
this.pmmlCompiler = PMMLCompilerFactory.getPMMLCompiler();
}
return this.pmmlCompiler;
@@ -425,8 +428,8 @@ private PMMLCompiler getPMMLCompiler() {
* @throws IOException
*/
public void addPackageFromDrl(final Reader reader) throws DroolsParserException,
- IOException {
- addPackageFromDrl( reader, new ReaderResource( reader, ResourceType.DRL ) );
+ IOException {
+ addPackageFromDrl(reader, new ReaderResource(reader, ResourceType.DRL));
}
/**
@@ -439,93 +442,92 @@ public void addPackageFromDrl(final Reader reader) throws DroolsParserException,
* @throws IOException
*/
public void addPackageFromDrl(final Reader reader,
- final Resource sourceResource) throws DroolsParserException,
- IOException {
+ final Resource sourceResource) throws DroolsParserException,
+ IOException {
this.resource = sourceResource;
- final DrlParser parser = new DrlParser( configuration.getLanguageLevel() );
- final PackageDescr pkg = parser.parse( sourceResource, reader );
- this.results.addAll( parser.getErrors() );
- if ( pkg == null ) {
- this.results.add( new ParserError( sourceResource, "Parser returned a null Package", 0, 0 ) );
+ final DrlParser parser = new DrlParser(configuration.getLanguageLevel());
+ final PackageDescr pkg = parser.parse(sourceResource, reader);
+ this.results.addAll(parser.getErrors());
+ if (pkg == null) {
+ this.results.add(new ParserError(sourceResource, "Parser returned a null Package", 0, 0));
}
- if ( !parser.hasErrors() ) {
- addPackage( pkg );
+ if (!parser.hasErrors()) {
+ addPackage(pkg);
}
this.resource = null;
}
public void addPackageFromDecisionTable(Resource resource,
- ResourceConfiguration configuration) throws DroolsParserException,
- IOException {
+ ResourceConfiguration configuration) throws DroolsParserException,
+ IOException {
this.resource = resource;
- addPackage( decisionTableToPackageDescr( resource, configuration ) );
+ addPackage(decisionTableToPackageDescr(resource, configuration));
this.resource = null;
}
PackageDescr decisionTableToPackageDescr(Resource resource,
- ResourceConfiguration configuration) throws DroolsParserException,
- IOException {
+ ResourceConfiguration configuration) throws DroolsParserException,
+ IOException {
DecisionTableConfiguration dtableConfiguration = (DecisionTableConfiguration) configuration;
- String string = DecisionTableFactory.loadFromInputStream( resource.getInputStream(), dtableConfiguration );
+ String string = DecisionTableFactory.loadFromInputStream(resource.getInputStream(), dtableConfiguration);
- DrlParser parser = new DrlParser( this.configuration.getLanguageLevel() );
- PackageDescr pkg = parser.parse( resource, new StringReader( string ) );
- this.results.addAll( parser.getErrors() );
- if ( pkg == null ) {
- this.results.add( new ParserError( resource, "Parser returned a null Package", 0, 0 ) );
+ DrlParser parser = new DrlParser(this.configuration.getLanguageLevel());
+ PackageDescr pkg = parser.parse(resource, new StringReader(string));
+ this.results.addAll(parser.getErrors());
+ if (pkg == null) {
+ this.results.add(new ParserError(resource, "Parser returned a null Package", 0, 0));
}
return parser.hasErrors() ? null : pkg;
}
public void addPackageFromScoreCard(Resource resource,
- ResourceConfiguration configuration) throws DroolsParserException,
+ ResourceConfiguration configuration) throws DroolsParserException,
IOException {
this.resource = resource;
- addPackage( scoreCardToPackageDescr( resource, configuration ) );
+ addPackage(scoreCardToPackageDescr(resource, configuration));
this.resource = null;
}
PackageDescr scoreCardToPackageDescr(Resource resource,
- ResourceConfiguration configuration) throws DroolsParserException,
+ ResourceConfiguration configuration) throws DroolsParserException,
IOException {
ScoreCardConfiguration scardConfiguration = (ScoreCardConfiguration) configuration;
- String string = ScoreCardFactory.loadFromInputStream( resource.getInputStream(), scardConfiguration );
+ String string = ScoreCardFactory.loadFromInputStream(resource.getInputStream(), scardConfiguration);
- DrlParser parser = new DrlParser( this.configuration.getLanguageLevel() );
- PackageDescr pkg = parser.parse( resource, new StringReader( string ) );
- this.results.addAll( parser.getErrors() );
- if ( pkg == null ) {
- this.results.add( new ParserError( resource, "Parser returned a null Package", 0, 0 ) );
+ DrlParser parser = new DrlParser(this.configuration.getLanguageLevel());
+ PackageDescr pkg = parser.parse(resource, new StringReader(string));
+ this.results.addAll(parser.getErrors());
+ if (pkg == null) {
+ this.results.add(new ParserError(resource, "Parser returned a null Package", 0, 0));
}
return parser.hasErrors() ? null : pkg;
}
-
public void addPackageFromDrl(Resource resource) throws DroolsParserException,
- IOException {
+ IOException {
this.resource = resource;
- addPackage( drlToPackageDescr( resource ) );
+ addPackage(drlToPackageDescr(resource));
this.resource = null;
}
PackageDescr drlToPackageDescr(Resource resource) throws DroolsParserException,
- IOException {
+ IOException {
PackageDescr pkg;
boolean hasErrors = false;
- if ( resource instanceof DescrResource ) {
+ if (resource instanceof DescrResource) {
pkg = (PackageDescr) ((DescrResource) resource).getDescr();
} else {
- final DrlParser parser = new DrlParser( configuration.getLanguageLevel() );
- pkg = parser.parse( resource );
- this.results.addAll( parser.getErrors() );
- if ( pkg == null ) {
- this.results.add( new ParserError( resource, "Parser returned a null Package", 0, 0 ) );
+ final DrlParser parser = new DrlParser(configuration.getLanguageLevel());
+ pkg = parser.parse(resource);
+ this.results.addAll(parser.getErrors());
+ if (pkg == null) {
+ this.results.add(new ParserError(resource, "Parser returned a null Package", 0, 0));
}
hasErrors = parser.hasErrors();
}
- if ( pkg != null ) {
- pkg.setResource( resource );
+ if (pkg != null) {
+ pkg.setResource(resource);
}
return hasErrors ? null : pkg;
}
@@ -538,43 +540,43 @@ PackageDescr drlToPackageDescr(Resource resource) throws DroolsParserException,
* @throws IOException
*/
public void addPackageFromXml(final Reader reader) throws DroolsParserException,
- IOException {
- this.resource = new ReaderResource( reader, ResourceType.XDRL );
- final XmlPackageReader xmlReader = new XmlPackageReader( this.configuration.getSemanticModules() );
- xmlReader.getParser().setClassLoader( this.rootClassLoader );
+ IOException {
+ this.resource = new ReaderResource(reader, ResourceType.XDRL);
+ final XmlPackageReader xmlReader = new XmlPackageReader(this.configuration.getSemanticModules());
+ xmlReader.getParser().setClassLoader(this.rootClassLoader);
try {
- xmlReader.read( reader );
- } catch ( final SAXException e ) {
- throw new DroolsParserException( e.toString(),
- e.getCause() );
+ xmlReader.read(reader);
+ } catch (final SAXException e) {
+ throw new DroolsParserException(e.toString(),
+ e.getCause());
}
- addPackage( xmlReader.getPackageDescr() );
+ addPackage(xmlReader.getPackageDescr());
this.resource = null;
}
public void addPackageFromXml(final Resource resource) throws DroolsParserException,
- IOException {
+ IOException {
this.resource = resource;
- addPackage( xmlToPackageDescr( resource ) );
+ addPackage(xmlToPackageDescr(resource));
this.resource = null;
}
PackageDescr xmlToPackageDescr(Resource resource) throws DroolsParserException,
- IOException {
- final XmlPackageReader xmlReader = new XmlPackageReader( this.configuration.getSemanticModules() );
- xmlReader.getParser().setClassLoader( this.rootClassLoader );
+ IOException {
+ final XmlPackageReader xmlReader = new XmlPackageReader(this.configuration.getSemanticModules());
+ xmlReader.getParser().setClassLoader(this.rootClassLoader);
Reader reader = null;
try {
reader = resource.getReader();
- xmlReader.read( reader );
- } catch ( final SAXException e ) {
- throw new DroolsParserException( e.toString(),
- e.getCause() );
+ xmlReader.read(reader);
+ } catch (final SAXException e) {
+ throw new DroolsParserException(e.toString(),
+ e.getCause());
} finally {
- if ( reader != null ) {
+ if (reader != null) {
reader.close();
}
}
@@ -592,23 +594,23 @@ PackageDescr xmlToPackageDescr(Resource resource) throws DroolsParserException,
* @throws IOException
*/
public void addPackageFromDrl(final Reader source,
- final Reader dsl) throws DroolsParserException,
- IOException {
- this.resource = new ReaderResource( source, ResourceType.DSLR );
+ final Reader dsl) throws DroolsParserException,
+ IOException {
+ this.resource = new ReaderResource(source, ResourceType.DSLR);
- final DrlParser parser = new DrlParser( configuration.getLanguageLevel() );
- final PackageDescr pkg = parser.parse( source, dsl );
- this.results.addAll( parser.getErrors() );
- if ( !parser.hasErrors() ) {
- addPackage( pkg );
+ final DrlParser parser = new DrlParser(configuration.getLanguageLevel());
+ final PackageDescr pkg = parser.parse(source, dsl);
+ this.results.addAll(parser.getErrors());
+ if (!parser.hasErrors()) {
+ addPackage(pkg);
}
this.resource = null;
}
public void addPackageFromDslr(final Resource resource) throws DroolsParserException,
- IOException {
+ IOException {
this.resource = resource;
- addPackage( dslrToPackageDescr( resource ) );
+ addPackage(dslrToPackageDescr(resource));
this.resource = null;
}
@@ -616,33 +618,33 @@ PackageDescr dslrToPackageDescr(Resource resource) throws DroolsParserException
boolean hasErrors;
PackageDescr pkg;
- DrlParser parser = new DrlParser( configuration.getLanguageLevel() );
+ DrlParser parser = new DrlParser(configuration.getLanguageLevel());
DefaultExpander expander = getDslExpander();
Reader reader = null;
try {
- if ( expander == null ) {
+ if (expander == null) {
expander = new DefaultExpander();
}
reader = resource.getReader();
- String str = expander.expand( reader );
- if ( expander.hasErrors() ) {
+ String str = expander.expand(reader);
+ if (expander.hasErrors()) {
for (ExpanderException error : expander.getErrors()) {
- error.setResource( resource );
- this.results.add( error );
+ error.setResource(resource);
+ this.results.add(error);
}
}
- pkg = parser.parse( resource, str );
- this.results.addAll( parser.getErrors() );
+ pkg = parser.parse(resource, str);
+ this.results.addAll(parser.getErrors());
hasErrors = parser.hasErrors();
- } catch ( IOException e ) {
- throw new RuntimeException( e );
+ } catch (IOException e) {
+ throw new RuntimeException(e);
} finally {
- if ( reader != null ) {
+ if (reader != null) {
try {
reader.close();
- } catch ( IOException e ) {
+ } catch (IOException e) {
}
}
}
@@ -656,15 +658,15 @@ public void addDsl(Resource resource) throws IOException {
Reader reader = null;
try {
reader = resource.getReader();
- if ( !file.parseAndLoad( reader ) ) {
- this.results.addAll( file.getErrors() );
+ if (!file.parseAndLoad(reader)) {
+ this.results.addAll(file.getErrors());
}
- if ( this.dslFiles == null ) {
+ if (this.dslFiles == null) {
this.dslFiles = new ArrayList<DSLTokenizedMappingFile>();
}
- this.dslFiles.add( file );
+ this.dslFiles.add(file);
} finally {
- if ( reader != null ) {
+ if (reader != null) {
reader.close();
}
this.resource = null;
@@ -676,135 +678,135 @@ public void addDsl(Resource resource) throws IOException {
* Add a ruleflow (.rfm) asset to this package.
*/
public void addRuleFlow(Reader processSource) {
- addProcessFromXml( processSource );
+ addProcessFromXml(processSource);
}
public void addProcessFromXml(Resource resource) {
- if ( processBuilder == null ) {
- throw new RuntimeException( "Unable to instantiate a process builder", processBuilderCreationFailure );
+ if (processBuilder == null) {
+ throw new RuntimeException("Unable to instantiate a process builder", processBuilderCreationFailure);
}
- if ( ResourceType.DRF.equals( resource.getResourceType() ) ) {
- this.results.add( new DeprecatedResourceTypeWarning( resource, "RF" ) );
+ if (ResourceType.DRF.equals(resource.getResourceType())) {
+ this.results.add(new DeprecatedResourceTypeWarning(resource, "RF"));
}
this.resource = resource;
try {
- this.results.addAll( processBuilder.addProcessFromXml( resource ) );
- } catch ( Exception e ) {
- if ( e instanceof RuntimeException ) {
+ this.results.addAll(processBuilder.addProcessFromXml(resource));
+ } catch (Exception e) {
+ if (e instanceof RuntimeException) {
throw (RuntimeException) e;
}
- this.results.add( new ProcessLoadError( resource, "Unable to load process.", e ) );
+ this.results.add(new ProcessLoadError(resource, "Unable to load process.", e));
}
- this.results = getResults( this.results );
+ this.results = getResults(this.results);
this.resource = null;
}
public void addProcessFromXml(Reader processSource) {
- addProcessFromXml( new ReaderResource( processSource, ResourceType.DRF ) );
+ addProcessFromXml(new ReaderResource(processSource, ResourceType.DRF));
}
public void addKnowledgeResource(Resource resource,
- ResourceType type,
- ResourceConfiguration configuration) {
+ ResourceType type,
+ ResourceConfiguration configuration) {
try {
- ((InternalResource) resource).setResourceType( type );
- if ( ResourceType.DRL.equals( type ) ) {
- addPackageFromDrl( resource );
- } else if ( ResourceType.GDRL.equals( type ) ) {
- addPackageFromDrl( resource );
- } else if ( ResourceType.RDRL.equals( type ) ) {
- addPackageFromDrl( resource );
- } else if ( ResourceType.DESCR.equals( type ) ) {
- addPackageFromDrl( resource );
- } else if ( ResourceType.DSLR.equals( type ) ) {
- addPackageFromDslr( resource );
- } else if ( ResourceType.RDSLR.equals( type ) ) {
- addPackageFromDslr( resource );
- } else if ( ResourceType.DSL.equals( type ) ) {
- addDsl( resource );
- } else if ( ResourceType.XDRL.equals( type ) ) {
- addPackageFromXml( resource );
- } else if ( ResourceType.DRF.equals( type ) ) {
- addProcessFromXml( resource );
- } else if ( ResourceType.BPMN2.equals( type ) ) {
- BPMN2ProcessFactory.configurePackageBuilder( this );
- addProcessFromXml( resource );
- } else if ( ResourceType.DTABLE.equals( type ) ) {
- addPackageFromDecisionTable( resource, configuration );
- } else if ( ResourceType.PKG.equals( type ) ) {
- addPackageFromInputStream( resource );
- } else if ( ResourceType.CHANGE_SET.equals( type ) ) {
- addPackageFromChangeSet( resource );
- } else if ( ResourceType.XSD.equals( type ) ) {
- addPackageFromXSD( resource, (JaxbConfigurationImpl) configuration );
- } else if ( ResourceType.PMML.equals( type ) ) {
- addPackageFromPMML( resource, type, configuration );
- } else if ( ResourceType.SCARD.equals( type ) ) {
- addPackageFromScoreCard( resource, configuration );
+ ((InternalResource) resource).setResourceType(type);
+ if (ResourceType.DRL.equals(type)) {
+ addPackageFromDrl(resource);
+ } else if (ResourceType.GDRL.equals(type)) {
+ addPackageFromDrl(resource);
+ } else if (ResourceType.RDRL.equals(type)) {
+ addPackageFromDrl(resource);
+ } else if (ResourceType.DESCR.equals(type)) {
+ addPackageFromDrl(resource);
+ } else if (ResourceType.DSLR.equals(type)) {
+ addPackageFromDslr(resource);
+ } else if (ResourceType.RDSLR.equals(type)) {
+ addPackageFromDslr(resource);
+ } else if (ResourceType.DSL.equals(type)) {
+ addDsl(resource);
+ } else if (ResourceType.XDRL.equals(type)) {
+ addPackageFromXml(resource);
+ } else if (ResourceType.DRF.equals(type)) {
+ addProcessFromXml(resource);
+ } else if (ResourceType.BPMN2.equals(type)) {
+ BPMN2ProcessFactory.configurePackageBuilder(this);
+ addProcessFromXml(resource);
+ } else if (ResourceType.DTABLE.equals(type)) {
+ addPackageFromDecisionTable(resource, configuration);
+ } else if (ResourceType.PKG.equals(type)) {
+ addPackageFromInputStream(resource);
+ } else if (ResourceType.CHANGE_SET.equals(type)) {
+ addPackageFromChangeSet(resource);
+ } else if (ResourceType.XSD.equals(type)) {
+ addPackageFromXSD(resource, (JaxbConfigurationImpl) configuration);
+ } else if (ResourceType.PMML.equals(type)) {
+ addPackageFromPMML(resource, type, configuration);
+ } else if (ResourceType.SCARD.equals(type)) {
+ addPackageFromScoreCard(resource, configuration);
} else {
- addPackageForExternalType( resource, type, configuration );
+ addPackageForExternalType(resource, type, configuration);
}
- } catch ( RuntimeException e ) {
+ } catch (RuntimeException e) {
throw e;
- } catch ( Exception e ) {
- throw new RuntimeException( e );
+ } catch (Exception e) {
+ throw new RuntimeException(e);
}
}
void addPackageForExternalType(Resource resource,
- ResourceType type,
- ResourceConfiguration configuration) throws Exception {
- ResourceTypeBuilder builder = ResourceTypeBuilderRegistry.getInstance().getResourceTypeBuilder( type );
- if ( builder != null ) {
- builder.setPackageBuilder( this );
- builder.addKnowledgeResource( resource,
- type,
- configuration );
+ ResourceType type,
+ ResourceConfiguration configuration) throws Exception {
+ ResourceTypeBuilder builder = ResourceTypeBuilderRegistry.getInstance().getResourceTypeBuilder(type);
+ if (builder != null) {
+ builder.setPackageBuilder(this);
+ builder.addKnowledgeResource(resource,
+ type,
+ configuration);
} else {
- throw new RuntimeException( "Unknown resource type: " + type );
+ throw new RuntimeException("Unknown resource type: " + type);
}
}
public void addPackageFromPMML(Resource resource,
- ResourceType type,
- ResourceConfiguration configuration) throws Exception {
+ ResourceType type,
+ ResourceConfiguration configuration) throws Exception {
PMMLCompiler compiler = getPMMLCompiler();
- if ( compiler != null ) {
- if ( compiler.getResults().isEmpty() ) {
+ if (compiler != null) {
+ if (compiler.getResults().isEmpty()) {
this.resource = resource;
PackageDescr descr = pmmlModelToPackageDescr(compiler, resource);
- if ( descr != null ) {
- addPackage( descr );
+ if (descr != null) {
+ addPackage(descr);
}
this.resource = null;
} else {
- this.results.addAll( compiler.getResults() );
+ this.results.addAll(compiler.getResults());
}
compiler.clearResults();
} else {
- addPackageForExternalType( resource, type, configuration );
+ addPackageForExternalType(resource, type, configuration);
}
}
PackageDescr pmmlModelToPackageDescr(PMMLCompiler compiler,
- Resource resource) throws DroolsParserException,
- IOException {
- String theory = compiler.compile( resource.getInputStream(),
- getPackageRegistry() );
+ Resource resource) throws DroolsParserException,
+ IOException {
+ String theory = compiler.compile(resource.getInputStream(),
+ getPackageRegistry());
- if ( ! compiler.getResults().isEmpty() ) {
- this.results.addAll( compiler.getResults() );
+ if (!compiler.getResults().isEmpty()) {
+ this.results.addAll(compiler.getResults());
return null;
}
- DrlParser parser = new DrlParser( configuration.getLanguageLevel() );
- PackageDescr pkg = parser.parse( resource, new StringReader( theory ) );
- this.results.addAll( parser.getErrors() );
- if ( pkg == null ) {
- this.results.add( new ParserError( resource, "Parser returned a null Package", 0, 0 ) );
+ DrlParser parser = new DrlParser(configuration.getLanguageLevel());
+ PackageDescr pkg = parser.parse(resource, new StringReader(theory));
+ this.results.addAll(parser.getErrors());
+ if (pkg == null) {
+ this.results.add(new ParserError(resource, "Parser returned a null Package", 0, 0));
return pkg;
} else {
return parser.hasErrors() ? null : pkg;
@@ -812,90 +814,91 @@ PackageDescr pmmlModelToPackageDescr(PMMLCompiler compiler,
}
void addPackageFromXSD(Resource resource,
- JaxbConfigurationImpl configuration) throws IOException {
- String[] classes = DroolsJaxbHelperProviderImpl.addXsdModel( resource,
- this,
- configuration.getXjcOpts(),
- configuration.getSystemId() );
- for ( String cls : classes ) {
- configuration.getClasses().add( cls );
+ JaxbConfigurationImpl configuration) throws IOException {
+ String[] classes = DroolsJaxbHelperProviderImpl.addXsdModel(resource,
+ this,
+ configuration.getXjcOpts(),
+ configuration.getSystemId());
+ for (String cls : classes) {
+ configuration.getClasses().add(cls);
}
}
void addPackageFromChangeSet(Resource resource) throws SAXException,
- IOException {
- XmlChangeSetReader reader = new XmlChangeSetReader( this.configuration.getSemanticModules() );
- if ( resource instanceof ClassPathResource ) {
- reader.setClassLoader( ((ClassPathResource) resource).getClassLoader(),
- ((ClassPathResource) resource).getClazz() );
+ IOException {
+ XmlChangeSetReader reader = new XmlChangeSetReader(this.configuration.getSemanticModules());
+ if (resource instanceof ClassPathResource) {
+ reader.setClassLoader(((ClassPathResource) resource).getClassLoader(),
+ ((ClassPathResource) resource).getClazz());
} else {
- reader.setClassLoader( this.configuration.getClassLoader(),
- null );
+ reader.setClassLoader(this.configuration.getClassLoader(),
+ null);
}
Reader resourceReader = null;
try {
resourceReader = resource.getReader();
- ChangeSet changeSet = reader.read( resourceReader );
- if ( changeSet == null ) {
+ ChangeSet changeSet = reader.read(resourceReader);
+ if (changeSet == null) {
// @TODO should log an error
}
- for ( Resource nestedResource : changeSet.getResourcesAdded() ) {
+ for (Resource nestedResource : changeSet.getResourcesAdded()) {
InternalResource iNestedResourceResource = (InternalResource) nestedResource;
- if ( iNestedResourceResource.isDirectory() ) {
- for ( Resource childResource : iNestedResourceResource.listResources() ) {
- if ( ((InternalResource) childResource).isDirectory() ) {
+ if (iNestedResourceResource.isDirectory()) {
+ for (Resource childResource : iNestedResourceResource.listResources()) {
+ if (((InternalResource) childResource).isDirectory()) {
continue; // ignore sub directories
}
- ((InternalResource) childResource).setResourceType( iNestedResourceResource.getResourceType() );
- addKnowledgeResource( childResource,
- iNestedResourceResource.getResourceType(),
- iNestedResourceResource.getConfiguration() );
+ ((InternalResource) childResource).setResourceType(iNestedResourceResource.getResourceType());
+ addKnowledgeResource(childResource,
+ iNestedResourceResource.getResourceType(),
+ iNestedResourceResource.getConfiguration());
}
} else {
- addKnowledgeResource( iNestedResourceResource,
- iNestedResourceResource.getResourceType(),
- iNestedResourceResource.getConfiguration() );
+ addKnowledgeResource(iNestedResourceResource,
+ iNestedResourceResource.getResourceType(),
+ iNestedResourceResource.getConfiguration());
}
}
} finally {
- if ( resourceReader != null ) {
+ if (resourceReader != null) {
resourceReader.close();
}
}
}
void addPackageFromInputStream(final Resource resource) throws IOException,
- ClassNotFoundException {
+ ClassNotFoundException {
InputStream is = resource.getInputStream();
- Object object = DroolsStreamUtils.streamIn( is, this.configuration.getClassLoader() );
+ Object object = DroolsStreamUtils.streamIn(is, this.configuration.getClassLoader());
is.close();
- if ( object instanceof Collection ) {
+ if (object instanceof Collection) {
// KnowledgeBuilder API
@SuppressWarnings("unchecked")
Collection<KnowledgePackage> pkgs = (Collection<KnowledgePackage>) object;
- for ( KnowledgePackage kpkg : pkgs ) {
- overrideReSource( ((KnowledgePackageImp) kpkg).pkg, resource );
- addPackage( ((KnowledgePackageImp) kpkg).pkg );
+ for (KnowledgePackage kpkg : pkgs) {
+ overrideReSource(((KnowledgePackageImp) kpkg).pkg, resource);
+ addPackage(((KnowledgePackageImp) kpkg).pkg);
}
- } else if ( object instanceof KnowledgePackageImp ) {
+ } else if (object instanceof KnowledgePackageImp) {
// KnowledgeBuilder API
KnowledgePackageImp kpkg = (KnowledgePackageImp) object;
- overrideReSource( kpkg.pkg, resource );
- addPackage( kpkg.pkg );
- } else if ( object instanceof Package ) {
+ overrideReSource(kpkg.pkg, resource);
+ addPackage(kpkg.pkg);
+ } else if (object instanceof Package) {
// Old Drools 4 API
Package pkg = (Package) object;
- overrideReSource( pkg, resource );
- addPackage( pkg );
- } else if ( object instanceof Package[] ) {
+ overrideReSource(pkg, resource);
+ addPackage(pkg);
+ } else if (object instanceof Package[]) {
// Old Drools 4 API
Package[] pkgs = (Package[]) object;
- for ( Package pkg : pkgs ) {
- overrideReSource( pkg, resource );
- addPackage( pkg );
+ for (Package pkg : pkgs) {
+ overrideReSource(pkg, resource);
+ addPackage(pkg);
}
} else {
- results.add( new DroolsError( resource ) {
+ results.add(new DroolsError(resource) {
+
@Override
public String getMessage() {
return "Unknown binary format trying to load resource " + resource.toString();
@@ -905,38 +908,38 @@ public String getMessage() {
public int[] getLines() {
return new int[0];
}
- } );
+ });
}
}
private void overrideReSource(Package pkg,
- Resource res) {
- for ( Rule r : pkg.getRules() ) {
- if ( isSwappable( r.getResource(), res ) ) {
- r.setResource( res );
+ Resource res) {
+ for (Rule r : pkg.getRules()) {
+ if (isSwappable(r.getResource(), res)) {
+ r.setResource(res);
}
}
- for ( TypeDeclaration d : pkg.getTypeDeclarations().values() ) {
- if ( isSwappable( d.getResource(), res ) ) {
- d.setResource( res );
+ for (TypeDeclaration d : pkg.getTypeDeclarations().values()) {
+ if (isSwappable(d.getResource(), res)) {
+ d.setResource(res);
}
}
- for ( Function f : pkg.getFunctions().values() ) {
- if ( isSwappable( f.getResource(), res ) ) {
- f.setResource( res );
+ for (Function f : pkg.getFunctions().values()) {
+ if (isSwappable(f.getResource(), res)) {
+ f.setResource(res);
}
}
- for ( Process p : pkg.getRuleFlows().values() ) {
- if ( isSwappable( p.getResource(), res ) ) {
- p.setResource( res );
+ for (Process p : pkg.getRuleFlows().values()) {
+ if (isSwappable(p.getResource(), res)) {
+ p.setResource(res);
}
}
}
private boolean isSwappable(Resource original,
- Resource source) {
+ Resource source) {
return original == null
- || (original instanceof ReaderResource && ((ReaderResource) original).getReader() == null);
+ || (original instanceof ReaderResource && ((ReaderResource) original).getReader() == null);
}
/**
@@ -944,121 +947,127 @@ private boolean isSwappable(Resource original,
* there are any generated classes to compile of course.
*/
public void addPackage(final PackageDescr packageDescr) {
- PackageRegistry pkgRegistry = initPackageRegistry( packageDescr );
- if ( pkgRegistry == null ) {
+ PackageRegistry pkgRegistry = initPackageRegistry(packageDescr);
+ if (pkgRegistry == null) {
return;
}
currentRulePackage = pkgRegistryMap.size() - 1;
// merge into existing package
- mergePackage( pkgRegistry, packageDescr );
+ mergePackage(pkgRegistry, packageDescr);
- compileAllRules( packageDescr, pkgRegistry );
+ compileAllRules(packageDescr, pkgRegistry);
}
void compileAllRules(PackageDescr packageDescr,
- PackageRegistry pkgRegistry) {
- pkgRegistry.setDialect( getPackageDialect( packageDescr ) );
+ PackageRegistry pkgRegistry) {
+ pkgRegistry.setDialect(getPackageDialect(packageDescr));
// only try to compile if there are no parse errors
- if ( !hasErrors() ) {
- compileRules( packageDescr, pkgRegistry );
+ if (!hasErrors()) {
+ compileRules(packageDescr, pkgRegistry);
}
compileAll();
try {
reloadAll();
- } catch ( Exception e ) {
- this.results.add( new DialectError( null, "Unable to wire compiled classes, probably related to compilation failures:" + e.getMessage() ) );
+ } catch (Exception e) {
+ this.results.add(new DialectError(null, "Unable to wire compiled classes, probably related to compilation failures:" + e.getMessage()));
}
updateResults();
// iterate and compile
- if ( !hasErrors() && this.ruleBase != null ) {
- for ( RuleDescr ruleDescr : packageDescr.getRules() ) {
- pkgRegistry = this.pkgRegistryMap.get( ruleDescr.getNamespace() );
- this.ruleBase.addRule( pkgRegistry.getPackage(), pkgRegistry.getPackage().getRule( ruleDescr.getName() ) );
+ if (!hasErrors() && this.ruleBase != null) {
+ for (RuleDescr ruleDescr : packageDescr.getRules()) {
+ if( filterAccepts( ruleDescr.getNamespace(), ruleDescr.getName() ) ) {
+ pkgRegistry = this.pkgRegistryMap.get(ruleDescr.getNamespace());
+ this.ruleBase.addRule(pkgRegistry.getPackage(), pkgRegistry.getPackage().getRule(ruleDescr.getName()));
+ }
}
}
}
PackageRegistry initPackageRegistry(PackageDescr packageDescr) {
- if ( packageDescr == null ) {
+ if (packageDescr == null) {
return null;
}
//Derive namespace
- if ( isEmpty( packageDescr.getNamespace() ) ) {
- packageDescr.setNamespace( this.configuration.getDefaultPackageName() );
+ if (isEmpty(packageDescr.getNamespace())) {
+ packageDescr.setNamespace(this.configuration.getDefaultPackageName());
}
- validateUniqueRuleNames( packageDescr );
- if ( !checkNamespace( packageDescr.getNamespace() ) ) {
+ validateUniqueRuleNames(packageDescr);
+ if (!checkNamespace(packageDescr.getNamespace())) {
return null;
}
- initPackage( packageDescr );
+ initPackage(packageDescr);
- PackageRegistry pkgRegistry = this.pkgRegistryMap.get( packageDescr.getNamespace() );
- if ( pkgRegistry == null ) {
+ PackageRegistry pkgRegistry = this.pkgRegistryMap.get(packageDescr.getNamespace());
+ if (pkgRegistry == null) {
// initialise the package and namespace if it hasn't been used before
- pkgRegistry = newPackage( packageDescr );
+ pkgRegistry = newPackage(packageDescr);
}
return pkgRegistry;
}
private void compileRules(PackageDescr packageDescr,
- PackageRegistry pkgRegistry) {
+ PackageRegistry pkgRegistry) {
List<FunctionDescr> functions = packageDescr.getFunctions();
- if ( !functions.isEmpty() ) {
+ if (!functions.isEmpty()) {
- for ( FunctionDescr functionDescr : functions ) {
- if ( isEmpty( functionDescr.getNamespace() ) ) {
- // make sure namespace is set on components
- functionDescr.setNamespace( packageDescr.getNamespace() );
- }
+ for (FunctionDescr functionDescr : functions) {
+ if (filterAccepts(functionDescr.getNamespace(), functionDescr.getName()) ) {
+ if (isEmpty(functionDescr.getNamespace())) {
+ // make sure namespace is set on components
+ functionDescr.setNamespace(packageDescr.getNamespace());
+ }
- // make sure functions are compiled using java dialect
- functionDescr.setDialect( "java" );
+ // make sure functions are compiled using java dialect
+ functionDescr.setDialect("java");
- preCompileAddFunction( functionDescr );
+ preCompileAddFunction(functionDescr);
+ }
}
// iterate and compile
- for ( FunctionDescr functionDescr : functions ) {
- // inherit the dialect from the package
- addFunction( functionDescr );
+ for (FunctionDescr functionDescr : functions) {
+ if (filterAccepts(functionDescr.getNamespace(), functionDescr.getName()) ) {
+ // inherit the dialect from the package
+ addFunction(functionDescr);
+ }
}
// We need to compile all the functions now, so scripting
// languages like mvel can find them
compileAll();
- for ( FunctionDescr functionDescr : functions ) {
- postCompileAddFunction( functionDescr );
+ for (FunctionDescr functionDescr : functions) {
+ if (filterAccepts(functionDescr.getNamespace(), functionDescr.getName()) ) {
+ postCompileAddFunction(functionDescr);
+ }
}
}
// ensure that rules are ordered by dependency, so that dependent rules are built later
- sortRulesByDependency( packageDescr );
-
-
+ sortRulesByDependency(packageDescr);
// iterate and prepare RuleDescr
- for ( RuleDescr ruleDescr : packageDescr.getRules() ) {
- if ( isEmpty( ruleDescr.getNamespace() ) ) {
+ for (RuleDescr ruleDescr : packageDescr.getRules()) {
+ if (isEmpty(ruleDescr.getNamespace())) {
// make sure namespace is set on components
- ruleDescr.setNamespace( packageDescr.getNamespace() );
+ ruleDescr.setNamespace(packageDescr.getNamespace());
}
- Map<String, AttributeDescr> pkgAttributes = packageAttributes.get( packageDescr.getNamespace() );
- inheritPackageAttributes( pkgAttributes,
- ruleDescr );
+ Map<String, AttributeDescr> pkgAttributes = packageAttributes.get(packageDescr.getNamespace());
+ inheritPackageAttributes(pkgAttributes,
+ ruleDescr);
- if ( isEmpty( ruleDescr.getDialect() ) ) {
- ruleDescr.addAttribute( new AttributeDescr( "dialect",
- pkgRegistry.getDialect() ) );
+ if (isEmpty(ruleDescr.getDialect())) {
+ ruleDescr.addAttribute(new AttributeDescr("dialect",
+ pkgRegistry.getDialect()));
}
}
@@ -1066,32 +1075,54 @@ private void compileRules(PackageDescr packageDescr,
Map<String, RuleBuildContext> ruleCxts = preProcessRules(packageDescr, pkgRegistry);
// iterate and compile
- for ( RuleDescr ruleDescr : packageDescr.getRules() ) {
- addRule( ruleCxts.get( ruleDescr.getName() ) );
+ for (RuleDescr ruleDescr : packageDescr.getRules()) {
+ if (filterAccepts(ruleDescr.getNamespace(), ruleDescr.getName()) ) {
+ addRule(ruleCxts.get(ruleDescr.getName()));
+ }
}
}
+ private boolean filterAccepts( String namespace, String name ) {
+ return assetFilter == null || ! Action.DO_NOTHING.equals( assetFilter.accept( namespace, name ) );
+ }
+
+ private boolean filterAcceptsRemoval( String namespace, String name ) {
+ return assetFilter != null && Action.REMOVE.equals( assetFilter.accept( namespace, name ) );
+ }
+
private Map<String, RuleBuildContext> preProcessRules(PackageDescr packageDescr, PackageRegistry pkgRegistry) {
- Map<String, RuleBuildContext> ruleCxts = buildRuleBuilderContext( packageDescr.getRules() );
+ Map<String, RuleBuildContext> ruleCxts = buildRuleBuilderContext(packageDescr.getRules());
Package pkg = pkgRegistry.getPackage();
- if ( this.ruleBase != null ) {
+ if (this.ruleBase != null) {
+ // first, remove any rules that are no longer there
+ for( Rule rule : pkg.getRules() ) {
+ if (filterAcceptsRemoval( rule.getPackageName(), rule.getName() ) ) {
+ this.ruleBase.removeRule(pkg, pkg.getRule(rule.getName()));
+ pkg.removeRule(rule);
+ }
+ }
+
boolean needsRemoval = false;
- for ( RuleDescr ruleDescr : packageDescr.getRules() ) {
- if ( pkg.getRule( ruleDescr.getName() ) != null ) {
- needsRemoval = true;
- break;
+ for (RuleDescr ruleDescr : packageDescr.getRules()) {
+ if (filterAccepts(ruleDescr.getNamespace(), ruleDescr.getName()) ) {
+ if (pkg.getRule(ruleDescr.getName()) != null) {
+ needsRemoval = true;
+ break;
+ }
}
}
- if ( needsRemoval ) {
+ if (needsRemoval) {
try {
this.ruleBase.lock();
- for ( RuleDescr ruleDescr : packageDescr.getRules() ) {
- if ( pkg.getRule( ruleDescr.getName() ) != null ) {
- // XXX: this one notifies listeners
- this.ruleBase.removeRule( pkg, pkg.getRule( ruleDescr.getName() ) );
+ for (RuleDescr ruleDescr : packageDescr.getRules()) {
+ if (filterAccepts(ruleDescr.getNamespace(), ruleDescr.getName()) ) {
+ if (pkg.getRule(ruleDescr.getName()) != null) {
+ // XXX: this one notifies listeners
+ this.ruleBase.removeRule(pkg, pkg.getRule(ruleDescr.getName()));
+ }
}
}
} finally {
@@ -1101,10 +1132,12 @@ private Map<String, RuleBuildContext> preProcessRules(PackageDescr packageDescr,
}
        // Pre-process each rule, needed for Query signature registration
- for ( RuleDescr ruleDescr : packageDescr.getRules() ) {
- RuleBuildContext ruleBuildContext = ruleCxts.get( ruleDescr.getName() );
- ruleBuilder.preProcess( ruleBuildContext );
- pkg.addRule( ruleBuildContext.getRule() );
+ for (RuleDescr ruleDescr : packageDescr.getRules()) {
+ if (filterAccepts(ruleDescr.getNamespace(), ruleDescr.getName()) ) {
+ RuleBuildContext ruleBuildContext = ruleCxts.get(ruleDescr.getName());
+ ruleBuilder.preProcess(ruleBuildContext);
+ pkg.addRule(ruleBuildContext.getRule());
+ }
}
return ruleCxts;
}
@@ -1113,7 +1146,7 @@ private void sortRulesByDependency(PackageDescr packageDescr) {
// Using a topological sorting algorithm
// see http://en.wikipedia.org/wiki/Topological_sorting
- PackageRegistry pkgRegistry = this.pkgRegistryMap.get( packageDescr.getNamespace() );
+ PackageRegistry pkgRegistry = this.pkgRegistryMap.get(packageDescr.getNamespace());
Package pkg = pkgRegistry.getPackage();
List<RuleDescr> roots = new LinkedList<RuleDescr>();
@@ -1121,117 +1154,117 @@ private void sortRulesByDependency(PackageDescr packageDescr) {
LinkedHashMap<String, RuleDescr> sorted = new LinkedHashMap<String, RuleDescr>();
List<RuleDescr> queries = new ArrayList<RuleDescr>();
- for ( RuleDescr ruleDescr : packageDescr.getRules() ) {
- if ( ruleDescr.isQuery() ) {
+ for (RuleDescr ruleDescr : packageDescr.getRules()) {
+ if (ruleDescr.isQuery()) {
queries.add(ruleDescr);
- } else if ( !ruleDescr.hasParent() ) {
+ } else if (!ruleDescr.hasParent()) {
roots.add(ruleDescr);
- } else if ( pkg.getRule( ruleDescr.getParentName() ) != null ) {
+ } else if (pkg.getRule(ruleDescr.getParentName()) != null) {
// The parent of this rule has been already compiled
- sorted.put( ruleDescr.getName(), ruleDescr );
+ sorted.put(ruleDescr.getName(), ruleDescr);
} else {
- List<RuleDescr> childz = children.get( ruleDescr.getParentName() );
- if ( childz == null ) {
+ List<RuleDescr> childz = children.get(ruleDescr.getParentName());
+ if (childz == null) {
childz = new ArrayList<RuleDescr>();
- children.put( ruleDescr.getParentName(), childz );
+ children.put(ruleDescr.getParentName(), childz);
}
- childz.add( ruleDescr );
+ childz.add(ruleDescr);
}
}
- if ( children.isEmpty() ) { // Sorting not necessary
- if ( !queries.isEmpty() ) { // Build all queries first
+ if (children.isEmpty()) { // Sorting not necessary
+ if (!queries.isEmpty()) { // Build all queries first
packageDescr.getRules().removeAll(queries);
packageDescr.getRules().addAll(0, queries);
}
return;
}
- while ( !roots.isEmpty() ) {
- RuleDescr root = roots.remove( 0 );
- sorted.put( root.getName(), root );
- List<RuleDescr> childz = children.remove( root.getName() );
- if ( childz != null ) {
- roots.addAll( childz );
+ while (!roots.isEmpty()) {
+ RuleDescr root = roots.remove(0);
+ sorted.put(root.getName(), root);
+ List<RuleDescr> childz = children.remove(root.getName());
+ if (childz != null) {
+ roots.addAll(childz);
}
}
- reportHierarchyErrors( children, sorted );
+ reportHierarchyErrors(children, sorted);
packageDescr.getRules().clear();
packageDescr.getRules().addAll(queries);
- for ( RuleDescr descr : sorted.values() ) {
- packageDescr.getRules().add( descr );
+ for (RuleDescr descr : sorted.values()) {
+ packageDescr.getRules().add(descr);
}
}
private void reportHierarchyErrors(Map<String, List<RuleDescr>> parents,
- Map<String, RuleDescr> sorted) {
+ Map<String, RuleDescr> sorted) {
boolean circularDep = false;
- for ( List<RuleDescr> rds : parents.values() ) {
- for ( RuleDescr ruleDescr : rds ) {
- if ( parents.get( ruleDescr.getParentName() ) != null
- && (sorted.containsKey( ruleDescr.getName() ) || parents.containsKey( ruleDescr.getName() )) ) {
+ for (List<RuleDescr> rds : parents.values()) {
+ for (RuleDescr ruleDescr : rds) {
+ if (parents.get(ruleDescr.getParentName()) != null
+ && (sorted.containsKey(ruleDescr.getName()) || parents.containsKey(ruleDescr.getName()))) {
circularDep = true;
- results.add( new RuleBuildError( new Rule( ruleDescr.getName() ), ruleDescr, null,
- "Circular dependency in rules hierarchy" ) );
+ results.add(new RuleBuildError(new Rule(ruleDescr.getName()), ruleDescr, null,
+ "Circular dependency in rules hierarchy"));
break;
}
- manageUnresolvedExtension( ruleDescr, sorted.values() );
+ manageUnresolvedExtension(ruleDescr, sorted.values());
}
- if ( circularDep ) {
+ if (circularDep) {
break;
}
}
}
private void manageUnresolvedExtension(RuleDescr ruleDescr,
- Collection<RuleDescr> candidates) {
+ Collection<RuleDescr> candidates) {
List<String> candidateRules = new LinkedList<String>();
- for ( RuleDescr r : candidates ) {
- if ( StringUtils.stringSimilarity( ruleDescr.getParentName(), r.getName(), StringUtils.SIMILARITY_STRATS.DICE ) >= 0.75 ) {
- candidateRules.add( r.getName() );
+ for (RuleDescr r : candidates) {
+ if (StringUtils.stringSimilarity(ruleDescr.getParentName(), r.getName(), StringUtils.SIMILARITY_STRATS.DICE) >= 0.75) {
+ candidateRules.add(r.getName());
}
}
String msg = "Unresolved parent name " + ruleDescr.getParentName();
- if ( candidateRules.size() > 0 ) {
+ if (candidateRules.size() > 0) {
msg += " >> did you mean any of :" + candidateRules;
}
- results.add( new RuleBuildError( new Rule( ruleDescr.getName() ), ruleDescr, msg,
- "Unable to resolve parent rule, please check that both rules are in the same package" ) );
+ results.add(new RuleBuildError(new Rule(ruleDescr.getName()), ruleDescr, msg,
+ "Unable to resolve parent rule, please check that both rules are in the same package"));
}
private void initPackage(PackageDescr packageDescr) {
//Gather all imports for all PackageDescrs for the current package and replicate into
//all PackageDescrs for the current package, thus maintaining a complete list of
//ImportDescrs for all PackageDescrs for the current package.
- List<PackageDescr> packageDescrsForPackage = packages.get( packageDescr.getName() );
- if ( packageDescrsForPackage == null ) {
+ List<PackageDescr> packageDescrsForPackage = packages.get(packageDescr.getName());
+ if (packageDescrsForPackage == null) {
packageDescrsForPackage = new ArrayList<PackageDescr>();
- packages.put( packageDescr.getName(),
- packageDescrsForPackage );
+ packages.put(packageDescr.getName(),
+ packageDescrsForPackage);
}
- packageDescrsForPackage.add( packageDescr );
+ packageDescrsForPackage.add(packageDescr);
Set<ImportDescr> imports = new HashSet<ImportDescr>();
- for ( PackageDescr pd : packageDescrsForPackage ) {
- imports.addAll( pd.getImports() );
+ for (PackageDescr pd : packageDescrsForPackage) {
+ imports.addAll(pd.getImports());
}
- for ( PackageDescr pd : packageDescrsForPackage ) {
+ for (PackageDescr pd : packageDescrsForPackage) {
pd.getImports().clear();
- pd.addAllImports( imports );
+ pd.addAllImports(imports);
}
//Copy package level attributes for inclusion on individual rules
- if ( !packageDescr.getAttributes().isEmpty() ) {
- Map<String, AttributeDescr> pkgAttributes = packageAttributes.get( packageDescr.getNamespace() );
- if ( pkgAttributes == null ) {
+ if (!packageDescr.getAttributes().isEmpty()) {
+ Map<String, AttributeDescr> pkgAttributes = packageAttributes.get(packageDescr.getNamespace());
+ if (pkgAttributes == null) {
pkgAttributes = new HashMap<String, AttributeDescr>();
- this.packageAttributes.put( packageDescr.getNamespace(),
- pkgAttributes );
+ this.packageAttributes.put(packageDescr.getNamespace(),
+ pkgAttributes);
}
- for ( AttributeDescr attr : packageDescr.getAttributes() ) {
- pkgAttributes.put( attr.getName(),
- attr );
+ for (AttributeDescr attr : packageDescr.getAttributes()) {
+ pkgAttributes.put(attr.getName(),
+ attr);
}
}
}
@@ -1239,8 +1272,8 @@ private void initPackage(PackageDescr packageDescr) {
private String getPackageDialect(PackageDescr packageDescr) {
String dialectName = this.defaultDialect;
// see if this packageDescr overrides the current default dialect
- for ( AttributeDescr value : packageDescr.getAttributes() ) {
- if ( "dialect".equals( value.getName() ) ) {
+ for (AttributeDescr value : packageDescr.getAttributes()) {
+ if ("dialect".equals(value.getName())) {
dialectName = value.getValue();
break;
}
@@ -1254,8 +1287,9 @@ private String getPackageDialect(PackageDescr packageDescr) {
* This checks to see if it should all be in the one namespace.
*/
private boolean checkNamespace(String newName) {
- if ( this.configuration == null ) return true;
- if ( (!this.pkgRegistryMap.isEmpty()) && (!this.pkgRegistryMap.containsKey( newName )) ) {
+ if (this.configuration == null)
+ return true;
+ if ((!this.pkgRegistryMap.isEmpty()) && (!this.pkgRegistryMap.containsKey(newName))) {
return this.configuration.isAllowMultipleNamespaces();
}
return true;
@@ -1267,80 +1301,80 @@ public boolean isEmpty(String string) {
public void updateResults() {
// some of the rules and functions may have been redefined
- updateResults( this.results );
+ updateResults(this.results);
}
public void updateResults(List<KnowledgeBuilderResult> results) {
- this.results = getResults( results );
+ this.results = getResults(results);
}
public void compileAll() {
- for ( PackageRegistry pkgRegistry : this.pkgRegistryMap.values() ) {
+ for (PackageRegistry pkgRegistry : this.pkgRegistryMap.values()) {
pkgRegistry.compileAll();
}
}
public void reloadAll() {
- for ( PackageRegistry pkgRegistry : this.pkgRegistryMap.values() ) {
+ for (PackageRegistry pkgRegistry : this.pkgRegistryMap.values()) {
pkgRegistry.getDialectRuntimeRegistry().onBeforeExecute();
}
}
private List<KnowledgeBuilderResult> getResults(List<KnowledgeBuilderResult> results) {
- for ( PackageRegistry pkgRegistry : this.pkgRegistryMap.values() ) {
- results = pkgRegistry.getDialectCompiletimeRegistry().addResults( results );
+ for (PackageRegistry pkgRegistry : this.pkgRegistryMap.values()) {
+ results = pkgRegistry.getDialectCompiletimeRegistry().addResults(results);
}
return results;
}
public synchronized void addPackage(final Package newPkg) {
- PackageRegistry pkgRegistry = this.pkgRegistryMap.get( newPkg.getName() );
+ PackageRegistry pkgRegistry = this.pkgRegistryMap.get(newPkg.getName());
Package pkg = null;
- if ( pkgRegistry != null ) {
+ if (pkgRegistry != null) {
pkg = pkgRegistry.getPackage();
}
- if ( pkg == null ) {
- PackageDescr packageDescr = new PackageDescr( newPkg.getName() );
- pkgRegistry = newPackage( packageDescr );
- mergePackage( this.pkgRegistryMap.get( packageDescr.getNamespace() ), packageDescr );
+ if (pkg == null) {
+ PackageDescr packageDescr = new PackageDescr(newPkg.getName());
+ pkgRegistry = newPackage(packageDescr);
+ mergePackage(this.pkgRegistryMap.get(packageDescr.getNamespace()), packageDescr);
pkg = pkgRegistry.getPackage();
}
// first merge anything related to classloader re-wiring
- pkg.getDialectRuntimeRegistry().merge( newPkg.getDialectRuntimeRegistry(),
- this.rootClassLoader );
- if ( newPkg.getFunctions() != null ) {
- for ( Map.Entry<String, Function> entry : newPkg.getFunctions().entrySet() ) {
- if ( pkg.getFunctions().containsKey( entry.getKey() ) ) {
- this.results.add( new DuplicateFunction( entry.getValue(),
- this.configuration ) );
+ pkg.getDialectRuntimeRegistry().merge(newPkg.getDialectRuntimeRegistry(),
+ this.rootClassLoader);
+ if (newPkg.getFunctions() != null) {
+ for (Map.Entry<String, Function> entry : newPkg.getFunctions().entrySet()) {
+ if (pkg.getFunctions().containsKey(entry.getKey())) {
+ this.results.add(new DuplicateFunction(entry.getValue(),
+ this.configuration));
}
- pkg.addFunction( entry.getValue() );
+ pkg.addFunction(entry.getValue());
}
}
- pkg.getClassFieldAccessorStore().merge( newPkg.getClassFieldAccessorStore() );
+ pkg.getClassFieldAccessorStore().merge(newPkg.getClassFieldAccessorStore());
pkg.getDialectRuntimeRegistry().onBeforeExecute();
// we have to do this before the merging, as it does some classloader resolving
TypeDeclaration lastType = null;
try {
            // Resolve the class for the type declaration
- if ( newPkg.getTypeDeclarations() != null ) {
+ if (newPkg.getTypeDeclarations() != null) {
// add type declarations
- for ( TypeDeclaration type : newPkg.getTypeDeclarations().values() ) {
+ for (TypeDeclaration type : newPkg.getTypeDeclarations().values()) {
lastType = type;
- type.setTypeClass( this.rootClassLoader.loadClass( type.getTypeClassName() ) );
+ type.setTypeClass(this.rootClassLoader.loadClass(type.getTypeClassName()));
}
}
- } catch ( ClassNotFoundException e ) {
- throw new RuntimeDroolsException( "unable to resolve Type Declaration class '" + lastType.getTypeName() +
- "'" );
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeDroolsException("unable to resolve Type Declaration class '" + lastType.getTypeName() +
+ "'");
}
// now merge the new package into the existing one
- mergePackage( pkg,
- newPkg );
+ mergePackage(pkg,
+ newPkg);
}
@@ -1351,59 +1385,59 @@ public synchronized void addPackage(final Package newPkg) {
* into the package).
*/
private void mergePackage(final Package pkg,
- final Package newPkg) {
+ final Package newPkg) {
// Merge imports
final Map<String, ImportDeclaration> imports = pkg.getImports();
- imports.putAll( newPkg.getImports() );
+ imports.putAll(newPkg.getImports());
String lastType = null;
try {
// merge globals
- if ( newPkg.getGlobals() != null && newPkg.getGlobals() != Collections.EMPTY_MAP ) {
+ if (newPkg.getGlobals() != null && newPkg.getGlobals() != Collections.EMPTY_MAP) {
Map<String, String> globals = pkg.getGlobals();
// Add globals
- for ( final Map.Entry<String, String> entry : newPkg.getGlobals().entrySet() ) {
+ for (final Map.Entry<String, String> entry : newPkg.getGlobals().entrySet()) {
final String identifier = entry.getKey();
final String type = entry.getValue();
lastType = type;
- if ( globals.containsKey( identifier ) && !globals.get( identifier ).equals( type ) ) {
- throw new PackageIntegrationException( pkg );
+ if (globals.containsKey(identifier) && !globals.get(identifier).equals(type)) {
+ throw new PackageIntegrationException(pkg);
} else {
- pkg.addGlobal( identifier,
- this.rootClassLoader.loadClass( type ) );
+ pkg.addGlobal(identifier,
+ this.rootClassLoader.loadClass(type));
// this isn't a package merge, it's adding to the rulebase, but I've put it here for convenience
- this.globals.put( identifier,
- this.rootClassLoader.loadClass( type ) );
+ this.globals.put(identifier,
+ this.rootClassLoader.loadClass(type));
}
}
}
- } catch ( ClassNotFoundException e ) {
- throw new RuntimeDroolsException( "Unable to resolve class '" + lastType + "'" );
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeDroolsException("Unable to resolve class '" + lastType + "'");
}
// merge the type declarations
- if ( newPkg.getTypeDeclarations() != null ) {
+ if (newPkg.getTypeDeclarations() != null) {
// add type declarations
- for ( TypeDeclaration type : newPkg.getTypeDeclarations().values() ) {
+ for (TypeDeclaration type : newPkg.getTypeDeclarations().values()) {
// @TODO should we allow overrides? only if the class is not in use.
- if ( !pkg.getTypeDeclarations().containsKey( type.getTypeName() ) ) {
+ if (!pkg.getTypeDeclarations().containsKey(type.getTypeName())) {
// add to package list of type declarations
- pkg.addTypeDeclaration( type );
+ pkg.addTypeDeclaration(type);
}
}
}
final Rule[] newRules = newPkg.getRules();
- for ( final Rule newRule : newRules ) {
- pkg.addRule( newRule );
+ for (final Rule newRule : newRules) {
+ pkg.addRule(newRule);
}
//Merge The Rule Flows
- if ( newPkg.getRuleFlows() != null ) {
+ if (newPkg.getRuleFlows() != null) {
final Map flows = newPkg.getRuleFlows();
- for ( Object o : flows.values() ) {
+ for (Object o : flows.values()) {
final Process flow = (Process) o;
- pkg.addProcess( flow );
+ pkg.addProcess(flow);
}
}
@@ -1422,166 +1456,166 @@ private void mergePackage(final Package pkg,
private void validateUniqueRuleNames(final PackageDescr packageDescr) {
final Set<String> names = new HashSet<String>();
- PackageRegistry packageRegistry = this.pkgRegistryMap.get( packageDescr.getNamespace() );
+ PackageRegistry packageRegistry = this.pkgRegistryMap.get(packageDescr.getNamespace());
Package pkg = null;
- if ( packageRegistry != null ) {
+ if (packageRegistry != null) {
pkg = packageRegistry.getPackage();
}
- for ( final RuleDescr rule : packageDescr.getRules() ) {
- validateRule( packageDescr, rule );
+ for (final RuleDescr rule : packageDescr.getRules()) {
+ validateRule(packageDescr, rule);
final String name = rule.getName();
- if ( names.contains( name ) ) {
- this.results.add( new ParserError( rule.getResource(),
- "Duplicate rule name: " + name,
- rule.getLine(),
- rule.getColumn(),
- packageDescr.getNamespace() ) );
- }
- if ( pkg != null ) {
- Rule duplicatedRule = pkg.getRule( name );
- if ( duplicatedRule != null ) {
+ if (names.contains(name)) {
+ this.results.add(new ParserError(rule.getResource(),
+ "Duplicate rule name: " + name,
+ rule.getLine(),
+ rule.getColumn(),
+ packageDescr.getNamespace()));
+ }
+ if (pkg != null) {
+ Rule duplicatedRule = pkg.getRule(name);
+ if (duplicatedRule != null) {
Resource resource = rule.getResource();
Resource duplicatedResource = duplicatedRule.getResource();
- if ( resource == null || duplicatedResource == null || duplicatedResource.getSourcePath() == null ||
- duplicatedResource.getSourcePath().equals( resource.getSourcePath() ) ) {
- this.results.add( new DuplicateRule( rule,
- packageDescr,
- this.configuration ) );
+ if (resource == null || duplicatedResource == null || duplicatedResource.getSourcePath() == null ||
+ duplicatedResource.getSourcePath().equals(resource.getSourcePath())) {
+ this.results.add(new DuplicateRule(rule,
+ packageDescr,
+ this.configuration));
} else {
- this.results.add( new ParserError( rule.getResource(),
- "Duplicate rule name: " + name,
- rule.getLine(),
- rule.getColumn(),
- packageDescr.getNamespace() ) );
+ this.results.add(new ParserError(rule.getResource(),
+ "Duplicate rule name: " + name,
+ rule.getLine(),
+ rule.getColumn(),
+ packageDescr.getNamespace()));
}
}
}
- names.add( name );
+ names.add(name);
}
}
private void validateRule(PackageDescr packageDescr,
- RuleDescr rule) {
- if ( rule.hasErrors() ) {
- for ( String error : rule.getErrors() ) {
- this.results.add( new ParserError( rule.getResource(),
- error + " in rule " + rule.getName(),
- rule.getLine(),
- rule.getColumn(),
- packageDescr.getNamespace() ) );
+ RuleDescr rule) {
+ if (rule.hasErrors()) {
+ for (String error : rule.getErrors()) {
+ this.results.add(new ParserError(rule.getResource(),
+ error + " in rule " + rule.getName(),
+ rule.getLine(),
+ rule.getColumn(),
+ packageDescr.getNamespace()));
}
}
}
private PackageRegistry newPackage(final PackageDescr packageDescr) {
Package pkg;
- if ( this.ruleBase == null || (pkg = this.ruleBase.getPackage( packageDescr.getName() )) == null ) {
+ if (this.ruleBase == null || (pkg = this.ruleBase.getPackage(packageDescr.getName())) == null) {
// there is no rulebase or it does not define this package so define it
- pkg = new Package( packageDescr.getName() );
- pkg.setClassFieldAccessorCache( new ClassFieldAccessorCache( this.rootClassLoader ) );
+ pkg = new Package(packageDescr.getName());
+ pkg.setClassFieldAccessorCache(new ClassFieldAccessorCache(this.rootClassLoader));
// if there is a rulebase then add the package.
- if ( this.ruleBase != null ) {
+ if (this.ruleBase != null) {
// Must lock here, otherwise the assumption about addPackage/getPackage behavior below might be violated
this.ruleBase.lock();
try {
- this.ruleBase.addPackage( pkg );
- pkg = this.ruleBase.getPackage( packageDescr.getName() );
+ this.ruleBase.addPackage(pkg);
+ pkg = this.ruleBase.getPackage(packageDescr.getName());
} finally {
this.ruleBase.unlock();
}
} else {
// the RuleBase will also initialise the
- pkg.getDialectRuntimeRegistry().onAdd( this.rootClassLoader );
+ pkg.getDialectRuntimeRegistry().onAdd(this.rootClassLoader);
}
}
- PackageRegistry pkgRegistry = new PackageRegistry( this,
- pkg );
+ PackageRegistry pkgRegistry = new PackageRegistry(this,
+ pkg);
// add default import for this namespace
- pkgRegistry.addImport( new ImportDescr( packageDescr.getNamespace() + ".*" ) );
+ pkgRegistry.addImport(new ImportDescr(packageDescr.getNamespace() + ".*"));
- this.pkgRegistryMap.put( packageDescr.getName(),
- pkgRegistry );
+ this.pkgRegistryMap.put(packageDescr.getName(),
+ pkgRegistry);
return pkgRegistry;
}
private void mergePackage(PackageRegistry pkgRegistry,
- PackageDescr packageDescr) {
- for ( final ImportDescr importDescr : packageDescr.getImports() ) {
- pkgRegistry.addImport( importDescr );
+ PackageDescr packageDescr) {
+ for (final ImportDescr importDescr : packageDescr.getImports()) {
+ pkgRegistry.addImport(importDescr);
}
- processEntryPointDeclarations( pkgRegistry, packageDescr );
+ processEntryPointDeclarations(pkgRegistry, packageDescr);
// process types in 2 steps to deal with circular and recursive declarations
- processUnresolvedTypes( pkgRegistry, processTypeDeclarations( pkgRegistry, packageDescr, new ArrayList<TypeDefinition>() ) );
+ processUnresolvedTypes(pkgRegistry, processTypeDeclarations(pkgRegistry, packageDescr, new ArrayList<TypeDefinition>()));
- processOtherDeclarations( pkgRegistry, packageDescr );
+ processOtherDeclarations(pkgRegistry, packageDescr);
}
void processOtherDeclarations(PackageRegistry pkgRegistry,
- PackageDescr packageDescr) {
- processWindowDeclarations( pkgRegistry, packageDescr );
- processFunctions( pkgRegistry, packageDescr );
- processGlobals( pkgRegistry, packageDescr );
+ PackageDescr packageDescr) {
+ processWindowDeclarations(pkgRegistry, packageDescr);
+ processFunctions(pkgRegistry, packageDescr);
+ processGlobals(pkgRegistry, packageDescr);
// need to reinsert this to ensure that the package is the first/last one in the ordered map
// this feature is exploited by the knowledgeAgent
Package current = getPackage();
- this.pkgRegistryMap.remove( packageDescr.getName() );
- this.pkgRegistryMap.put( packageDescr.getName(), pkgRegistry );
- if ( !current.getName().equals( packageDescr.getName() ) ) {
+ this.pkgRegistryMap.remove(packageDescr.getName());
+ this.pkgRegistryMap.put(packageDescr.getName(), pkgRegistry);
+ if (!current.getName().equals(packageDescr.getName())) {
currentRulePackage = pkgRegistryMap.size() - 1;
}
}
private void processGlobals(PackageRegistry pkgRegistry,
- PackageDescr packageDescr) {
- for ( final GlobalDescr global : packageDescr.getGlobals() ) {
+ PackageDescr packageDescr) {
+ for (final GlobalDescr global : packageDescr.getGlobals()) {
final String identifier = global.getIdentifier();
String className = global.getType();
// JBRULES-3039: can't handle type name with generic params
- while ( className.indexOf( '<' ) >= 0 ) {
- className = className.replaceAll( "<[^<>]+?>", "" );
+ while (className.indexOf('<') >= 0) {
+ className = className.replaceAll("<[^<>]+?>", "");
}
try {
- Class< ? > clazz = pkgRegistry.getTypeResolver().resolveType( className );
- if ( clazz.isPrimitive() ) {
- this.results.add( new GlobalError( global, " Primitive types are not allowed in globals : " + className ) );
+ Class<?> clazz = pkgRegistry.getTypeResolver().resolveType(className);
+ if (clazz.isPrimitive()) {
+ this.results.add(new GlobalError(global, " Primitive types are not allowed in globals : " + className));
return;
}
- pkgRegistry.getPackage().addGlobal( identifier,
- clazz );
- this.globals.put( identifier,
- clazz );
- } catch ( final ClassNotFoundException e ) {
- this.results.add( new GlobalError( global, e.getMessage() ) );
+ pkgRegistry.getPackage().addGlobal(identifier,
+ clazz);
+ this.globals.put(identifier,
+ clazz);
+ } catch (final ClassNotFoundException e) {
+ this.results.add(new GlobalError(global, e.getMessage()));
e.printStackTrace();
}
}
}
private void processFunctions(PackageRegistry pkgRegistry,
- PackageDescr packageDescr) {
- for ( FunctionDescr function : packageDescr.getFunctions() ) {
- Function existingFunc = pkgRegistry.getPackage().getFunctions().get( function.getName() );
- if ( existingFunc != null && function.getNamespace().equals( existingFunc.getNamespace() ) ) {
+ PackageDescr packageDescr) {
+ for (FunctionDescr function : packageDescr.getFunctions()) {
+ Function existingFunc = pkgRegistry.getPackage().getFunctions().get(function.getName());
+ if (existingFunc != null && function.getNamespace().equals(existingFunc.getNamespace())) {
this.results.add(
- new DuplicateFunction( function,
- this.configuration ) );
+ new DuplicateFunction(function,
+ this.configuration));
}
}
- for ( final FunctionImportDescr functionImport : packageDescr.getFunctionImports() ) {
+ for (final FunctionImportDescr functionImport : packageDescr.getFunctionImports()) {
String importEntry = functionImport.getTarget();
- pkgRegistry.addStaticImport( functionImport );
- pkgRegistry.getPackage().addStaticImport( importEntry );
+ pkgRegistry.addStaticImport(functionImport);
+ pkgRegistry.getPackage().addStaticImport(importEntry);
}
}
@@ -1597,212 +1631,213 @@ void processUnresolvedType(PackageRegistry pkgRegistry, TypeDefinition unresolve
processTypeFields(pkgRegistry, unresolvedTypeDefinition.typeDescr, unresolvedTypeDefinition.type, false);
}
- public TypeDeclaration getAndRegisterTypeDeclaration( Class<?> cls, String packageName ) {
+ public TypeDeclaration getAndRegisterTypeDeclaration(Class<?> cls, String packageName) {
if (cls.isPrimitive() || cls.isArray()) {
return null;
}
- TypeDeclaration typeDeclaration = getCachedTypeDeclaration( cls );
- if ( typeDeclaration != null ) {
- registerTypeDeclaration( packageName, typeDeclaration );
+ TypeDeclaration typeDeclaration = getCachedTypeDeclaration(cls);
+ if (typeDeclaration != null) {
+ registerTypeDeclaration(packageName, typeDeclaration);
return typeDeclaration;
}
- typeDeclaration = getExistingTypeDeclaration( cls );
- if ( typeDeclaration != null ) {
- initTypeDeclaration( cls, typeDeclaration );
+ typeDeclaration = getExistingTypeDeclaration(cls);
+ if (typeDeclaration != null) {
+ initTypeDeclaration(cls, typeDeclaration);
return typeDeclaration;
}
- typeDeclaration = createTypeDeclarationForBean( cls );
- initTypeDeclaration( cls, typeDeclaration );
- registerTypeDeclaration( packageName, typeDeclaration );
+ typeDeclaration = createTypeDeclarationForBean(cls);
+ initTypeDeclaration(cls, typeDeclaration);
+ registerTypeDeclaration(packageName, typeDeclaration);
return typeDeclaration;
}
private void registerTypeDeclaration(String packageName,
- TypeDeclaration typeDeclaration) {
- if ( typeDeclaration.getNature() == TypeDeclaration.Nature.DECLARATION || packageName.equals( typeDeclaration.getTypeClass().getPackage().getName() ) ) {
- PackageRegistry packageRegistry = pkgRegistryMap.get( packageName );
- if ( packageRegistry != null ) {
- packageRegistry.getPackage().addTypeDeclaration( typeDeclaration );
+ TypeDeclaration typeDeclaration) {
+ if (typeDeclaration.getNature() == TypeDeclaration.Nature.DECLARATION || packageName.equals(typeDeclaration.getTypeClass().getPackage().getName())) {
+ PackageRegistry packageRegistry = pkgRegistryMap.get(packageName);
+ if (packageRegistry != null) {
+ packageRegistry.getPackage().addTypeDeclaration(typeDeclaration);
} else {
- newPackage( new PackageDescr( packageName, "" ) );
- pkgRegistryMap.get( packageName ).getPackage().addTypeDeclaration( typeDeclaration );
+ newPackage(new PackageDescr(packageName, ""));
+ pkgRegistryMap.get(packageName).getPackage().addTypeDeclaration(typeDeclaration);
}
}
}
- public TypeDeclaration getTypeDeclaration(Class< ? > cls) {
- if ( cls.isPrimitive() || cls.isArray() ) return null;
+ public TypeDeclaration getTypeDeclaration(Class<?> cls) {
+ if (cls.isPrimitive() || cls.isArray())
+ return null;
// If this class has already been accessed, it'll be in the cache
- TypeDeclaration tdecl = getCachedTypeDeclaration( cls );
- return tdecl != null ? tdecl : createTypeDeclaration( cls );
+ TypeDeclaration tdecl = getCachedTypeDeclaration(cls);
+ return tdecl != null ? tdecl : createTypeDeclaration(cls);
}
- private TypeDeclaration createTypeDeclaration(Class< ? > cls) {
- TypeDeclaration typeDeclaration = getExistingTypeDeclaration( cls );
+ private TypeDeclaration createTypeDeclaration(Class<?> cls) {
+ TypeDeclaration typeDeclaration = getExistingTypeDeclaration(cls);
- if ( typeDeclaration == null ) {
- typeDeclaration = createTypeDeclarationForBean( cls );
+ if (typeDeclaration == null) {
+ typeDeclaration = createTypeDeclarationForBean(cls);
}
- initTypeDeclaration( cls, typeDeclaration );
+ initTypeDeclaration(cls, typeDeclaration);
return typeDeclaration;
}
- private TypeDeclaration getCachedTypeDeclaration(Class< ? > cls) {
- if ( this.cacheTypes == null ) {
+ private TypeDeclaration getCachedTypeDeclaration(Class<?> cls) {
+ if (this.cacheTypes == null) {
this.cacheTypes = new HashMap<String, TypeDeclaration>();
return null;
} else {
- return cacheTypes.get( cls.getName() );
+ return cacheTypes.get(cls.getName());
}
}
- private TypeDeclaration getExistingTypeDeclaration(Class< ? > cls) {
+ private TypeDeclaration getExistingTypeDeclaration(Class<?> cls) {
// Check if we are in the built-ins
- TypeDeclaration typeDeclaration = this.builtinTypes.get( (cls.getName()) );
- if ( typeDeclaration == null ) {
+ TypeDeclaration typeDeclaration = this.builtinTypes.get((cls.getName()));
+ if (typeDeclaration == null) {
// No built-in
// Check if there is a user specified typedeclr
- PackageRegistry pkgReg = this.pkgRegistryMap.get( ClassUtils.getPackage( cls ) );
- if ( pkgReg != null ) {
+ PackageRegistry pkgReg = this.pkgRegistryMap.get(ClassUtils.getPackage(cls));
+ if (pkgReg != null) {
String className = cls.getName();
- String typeName = className.substring( className.lastIndexOf( "." ) + 1 );
- typeDeclaration = pkgReg.getPackage().getTypeDeclaration( typeName );
+ String typeName = className.substring(className.lastIndexOf(".") + 1);
+ typeDeclaration = pkgReg.getPackage().getTypeDeclaration(typeName);
}
}
return typeDeclaration;
}
- private void initTypeDeclaration(Class< ? > cls,
- TypeDeclaration typeDeclaration) {
+ private void initTypeDeclaration(Class<?> cls,
+ TypeDeclaration typeDeclaration) {
ClassDefinition clsDef = typeDeclaration.getTypeClassDef();
- if ( clsDef == null ) {
+ if (clsDef == null) {
clsDef = new ClassDefinition();
- typeDeclaration.setTypeClassDef( clsDef );
+ typeDeclaration.setTypeClassDef(clsDef);
}
- if ( typeDeclaration.isPropertyReactive() ) {
- processModifiedProps( cls, clsDef );
+ if (typeDeclaration.isPropertyReactive()) {
+ processModifiedProps(cls, clsDef);
}
- processFieldsPosition( cls, clsDef );
+ processFieldsPosition(cls, clsDef);
// build up a set of all the super classes and interfaces
Set<TypeDeclaration> tdecls = new LinkedHashSet<TypeDeclaration>();
- tdecls.add( typeDeclaration );
- buildTypeDeclarations( cls,
- tdecls );
+ tdecls.add(typeDeclaration);
+ buildTypeDeclarations(cls,
+ tdecls);
        // Iterate and, for each typedeclr, assign its value if it's not already set
// We start from the rear as those are the furthest away classes and interfaces
- TypeDeclaration[] tarray = tdecls.toArray( new TypeDeclaration[tdecls.size()] );
- for ( int i = tarray.length - 1; i >= 0; i-- ) {
+ TypeDeclaration[] tarray = tdecls.toArray(new TypeDeclaration[tdecls.size()]);
+ for (int i = tarray.length - 1; i >= 0; i--) {
TypeDeclaration currentTDecl = tarray[i];
- if ( !isSet( typeDeclaration.getSetMask(),
- TypeDeclaration.ROLE_BIT ) && isSet( currentTDecl.getSetMask(),
- TypeDeclaration.ROLE_BIT ) ) {
- typeDeclaration.setRole( currentTDecl.getRole() );
+ if (!isSet(typeDeclaration.getSetMask(),
+ TypeDeclaration.ROLE_BIT) && isSet(currentTDecl.getSetMask(),
+ TypeDeclaration.ROLE_BIT)) {
+ typeDeclaration.setRole(currentTDecl.getRole());
}
- if ( !isSet( typeDeclaration.getSetMask(),
- TypeDeclaration.FORMAT_BIT ) && isSet( currentTDecl.getSetMask(),
- TypeDeclaration.FORMAT_BIT ) ) {
- typeDeclaration.setFormat( currentTDecl.getFormat() );
+ if (!isSet(typeDeclaration.getSetMask(),
+ TypeDeclaration.FORMAT_BIT) && isSet(currentTDecl.getSetMask(),
+ TypeDeclaration.FORMAT_BIT)) {
+ typeDeclaration.setFormat(currentTDecl.getFormat());
}
- if ( !isSet( typeDeclaration.getSetMask(),
- TypeDeclaration.TYPESAFE_BIT ) && isSet( currentTDecl.getSetMask(),
- TypeDeclaration.TYPESAFE_BIT ) ) {
- typeDeclaration.setTypesafe( currentTDecl.isTypesafe() );
+ if (!isSet(typeDeclaration.getSetMask(),
+ TypeDeclaration.TYPESAFE_BIT) && isSet(currentTDecl.getSetMask(),
+ TypeDeclaration.TYPESAFE_BIT)) {
+ typeDeclaration.setTypesafe(currentTDecl.isTypesafe());
}
}
- this.cacheTypes.put( cls.getName(),
- typeDeclaration );
+ this.cacheTypes.put(cls.getName(),
+ typeDeclaration);
}
- private TypeDeclaration createTypeDeclarationForBean(Class< ? > cls) {
- TypeDeclaration typeDeclaration = new TypeDeclaration( cls );
+ private TypeDeclaration createTypeDeclarationForBean(Class<?> cls) {
+ TypeDeclaration typeDeclaration = new TypeDeclaration(cls);
- PropertySpecificOption propertySpecificOption = configuration.getOption( PropertySpecificOption.class );
- boolean propertyReactive = propertySpecificOption.isPropSpecific( cls.isAnnotationPresent( PropertyReactive.class ),
- cls.isAnnotationPresent( ClassReactive.class ) );
+ PropertySpecificOption propertySpecificOption = configuration.getOption(PropertySpecificOption.class);
+ boolean propertyReactive = propertySpecificOption.isPropSpecific(cls.isAnnotationPresent(PropertyReactive.class),
+ cls.isAnnotationPresent(ClassReactive.class));
- setPropertyReactive( null, typeDeclaration, propertyReactive );
+ setPropertyReactive(null, typeDeclaration, propertyReactive);
- Role role = cls.getAnnotation( Role.class );
- if ( role != null && role.value() == Role.Type.EVENT) {
+ Role role = cls.getAnnotation(Role.class);
+ if (role != null && role.value() == Role.Type.EVENT) {
typeDeclaration.setRole(TypeDeclaration.Role.EVENT);
}
return typeDeclaration;
}
- private void processModifiedProps(Class< ? > cls,
- ClassDefinition clsDef) {
- for ( Method method : cls.getDeclaredMethods() ) {
- Modifies modifies = method.getAnnotation( Modifies.class );
- if ( modifies != null ) {
+ private void processModifiedProps(Class<?> cls,
+ ClassDefinition clsDef) {
+ for (Method method : cls.getDeclaredMethods()) {
+ Modifies modifies = method.getAnnotation(Modifies.class);
+ if (modifies != null) {
String[] props = modifies.value();
- List<String> properties = new ArrayList<String>( props.length );
- for ( String prop : props ) {
- properties.add( prop.trim() );
+ List<String> properties = new ArrayList<String>(props.length);
+ for (String prop : props) {
+ properties.add(prop.trim());
}
- clsDef.addModifiedPropsByMethod( method,
- properties );
+ clsDef.addModifiedPropsByMethod(method,
+ properties);
}
}
}
- private void processFieldsPosition(Class< ? > cls,
- ClassDefinition clsDef) {
+ private void processFieldsPosition(Class<?> cls,
+ ClassDefinition clsDef) {
// it's a new type declaration, so generate the @Position for it
Collection<Field> fields = new LinkedList<Field>();
- Class< ? > tempKlass = cls;
- while ( tempKlass != null && tempKlass != Object.class ) {
- Collections.addAll( fields, tempKlass.getDeclaredFields() );
+ Class<?> tempKlass = cls;
+ while (tempKlass != null && tempKlass != Object.class) {
+ Collections.addAll(fields, tempKlass.getDeclaredFields());
tempKlass = tempKlass.getSuperclass();
}
- List<FieldDefinition> orderedFields = new ArrayList<FieldDefinition>( fields.size() );
- for ( int i = 0; i < fields.size(); i++ ) {
+ List<FieldDefinition> orderedFields = new ArrayList<FieldDefinition>(fields.size());
+ for (int i = 0; i < fields.size(); i++) {
// as these could be set in any order, initialise first, to allow setting later.
- orderedFields.add( null );
+ orderedFields.add(null);
}
- for ( Field fld : fields ) {
- Position pos = fld.getAnnotation( Position.class );
- if ( pos != null ) {
+ for (Field fld : fields) {
+ Position pos = fld.getAnnotation(Position.class);
+ if (pos != null) {
FieldDefinition fldDef = clsDef.getField(fld.getName());
if (fldDef == null) {
- fldDef = new FieldDefinition( fld.getName(), fld.getType().getName() );
+ fldDef = new FieldDefinition(fld.getName(), fld.getType().getName());
}
- fldDef.setIndex( pos.value() );
- orderedFields.set( pos.value(), fldDef );
+ fldDef.setIndex(pos.value());
+ orderedFields.set(pos.value(), fldDef);
}
}
- for ( FieldDefinition fld : orderedFields ) {
- if ( fld != null ) {
+ for (FieldDefinition fld : orderedFields) {
+ if (fld != null) {
// it's null if there is no @Position
- clsDef.addField( fld );
+ clsDef.addField(fld);
}
}
}
- public void buildTypeDeclarations(Class< ? > cls,
- Set<TypeDeclaration> tdecls) {
+ public void buildTypeDeclarations(Class<?> cls,
+ Set<TypeDeclaration> tdecls) {
// Process current interfaces
- Class< ? >[] intfs = cls.getInterfaces();
- for ( Class< ? > intf : intfs ) {
- buildTypeDeclarationInterfaces( intf,
- tdecls );
+ Class<?>[] intfs = cls.getInterfaces();
+ for (Class<?> intf : intfs) {
+ buildTypeDeclarationInterfaces(intf,
+ tdecls);
}
// Process super classes and their interfaces
cls = cls.getSuperclass();
- while ( cls != null && cls != Object.class ) {
- if ( !buildTypeDeclarationInterfaces( cls,
- tdecls ) ) {
+ while (cls != null && cls != Object.class) {
+ if (!buildTypeDeclarationInterfaces(cls,
+ tdecls)) {
break;
}
cls = cls.getSuperclass();
@@ -1811,36 +1846,36 @@ public void buildTypeDeclarations(Class< ? > cls,
}
public boolean buildTypeDeclarationInterfaces(Class cls,
- Set<TypeDeclaration> tdecls) {
+ Set<TypeDeclaration> tdecls) {
PackageRegistry pkgReg;
- TypeDeclaration tdecl = this.builtinTypes.get( (cls.getName()) );
- if ( tdecl == null ) {
- pkgReg = this.pkgRegistryMap.get( ClassUtils.getPackage( cls ) );
- if ( pkgReg != null ) {
- tdecl = pkgReg.getPackage().getTypeDeclaration( cls.getSimpleName() );
+ TypeDeclaration tdecl = this.builtinTypes.get((cls.getName()));
+ if (tdecl == null) {
+ pkgReg = this.pkgRegistryMap.get(ClassUtils.getPackage(cls));
+ if (pkgReg != null) {
+ tdecl = pkgReg.getPackage().getTypeDeclaration(cls.getSimpleName());
}
}
- if ( tdecl != null ) {
- if ( !tdecls.add( tdecl ) ) {
+ if (tdecl != null) {
+ if (!tdecls.add(tdecl)) {
return false; // the interface already exists, return to stop recursion
}
}
- Class< ? >[] intfs = cls.getInterfaces();
- for ( Class< ? > intf : intfs ) {
- pkgReg = this.pkgRegistryMap.get( ClassUtils.getPackage( intf ) );
- if ( pkgReg != null ) {
- tdecl = pkgReg.getPackage().getTypeDeclaration( intf.getSimpleName() );
+ Class<?>[] intfs = cls.getInterfaces();
+ for (Class<?> intf : intfs) {
+ pkgReg = this.pkgRegistryMap.get(ClassUtils.getPackage(intf));
+ if (pkgReg != null) {
+ tdecl = pkgReg.getPackage().getTypeDeclaration(intf.getSimpleName());
}
- if ( tdecl != null ) {
- tdecls.add( tdecl );
+ if (tdecl != null) {
+ tdecls.add(tdecl);
}
}
- for ( Class< ? > intf : intfs ) {
- if ( !buildTypeDeclarationInterfaces( intf,
- tdecls ) ) {
+ for (Class<?> intf : intfs) {
+ if (!buildTypeDeclarationInterfaces(intf,
+ tdecls)) {
return false;
}
}
@@ -1864,12 +1899,12 @@ public boolean buildTypeDeclarationInterfaces(Class cls,
* @return the fully qualified name of the superclass
*/
private String resolveType(String sup,
- PackageDescr packageDescr,
- PackageRegistry pkgRegistry) {
+ PackageDescr packageDescr,
+ PackageRegistry pkgRegistry) {
//look among imports
- for ( ImportDescr id : packageDescr.getImports() ) {
- if ( id.getTarget().endsWith( "." + sup ) ) {
+ for (ImportDescr id : packageDescr.getImports()) {
+ if (id.getTarget().endsWith("." + sup)) {
//logger.info("Replace supertype " + sup + " with full name " + id.getTarget());
return id.getTarget();
@@ -1877,15 +1912,17 @@ private String resolveType(String sup,
}
//look among local declarations
- if ( pkgRegistry != null ) {
- for ( String declaredName : pkgRegistry.getPackage().getTypeDeclarations().keySet() ) {
- if ( declaredName.equals( sup ) ) sup = pkgRegistry.getPackage().getTypeDeclaration( declaredName ).getTypeClass().getName();
+ if (pkgRegistry != null) {
+ for (String declaredName : pkgRegistry.getPackage().getTypeDeclarations().keySet()) {
+ if (declaredName.equals(sup))
+ sup = pkgRegistry.getPackage().getTypeDeclaration(declaredName).getTypeClass().getName();
}
}
- if ( (sup != null) && (!sup.contains( "." )) && (packageDescr.getNamespace() != null && !packageDescr.getNamespace().isEmpty()) ) {
- for ( AbstractClassTypeDeclarationDescr td : packageDescr.getClassAndEnumDeclarationDescrs() ) {
- if ( sup.equals( td.getTypeName() ) ) sup = packageDescr.getNamespace() + "." + sup;
+ if ((sup != null) && (!sup.contains(".")) && (packageDescr.getNamespace() != null && !packageDescr.getNamespace().isEmpty())) {
+ for (AbstractClassTypeDeclarationDescr td : packageDescr.getClassAndEnumDeclarationDescrs()) {
+ if (sup.equals(td.getTypeName()))
+ sup = packageDescr.getNamespace() + "." + sup;
}
}
@@ -1906,34 +1943,34 @@ private String resolveType(String sup,
* the descriptor of the package the class is declared in
*/
private void fillSuperType(TypeDeclarationDescr typeDescr,
- PackageDescr packageDescr) {
+ PackageDescr packageDescr) {
- for ( QualifiedName qname : typeDescr.getSuperTypes() ) {
+ for (QualifiedName qname : typeDescr.getSuperTypes()) {
String declaredSuperType = qname.getFullName();
- if ( declaredSuperType != null ) {
- int separator = declaredSuperType.lastIndexOf( "." );
+ if (declaredSuperType != null) {
+ int separator = declaredSuperType.lastIndexOf(".");
boolean qualified = separator > 0;
// check if a simple name corresponds to a f.q.n.
- if ( !qualified ) {
+ if (!qualified) {
declaredSuperType =
- resolveType( declaredSuperType,
- packageDescr,
- this.pkgRegistryMap.get( typeDescr.getNamespace() ) );
+ resolveType(declaredSuperType,
+ packageDescr,
+ this.pkgRegistryMap.get(typeDescr.getNamespace()));
- declaredSuperType = typeName2ClassName( declaredSuperType );
+ declaredSuperType = typeName2ClassName(declaredSuperType);
// sets supertype name and supertype package
- separator = declaredSuperType.lastIndexOf( "." );
- if ( separator < 0 ) {
- this.results.add( new TypeDeclarationError( typeDescr,
- "Cannot resolve supertype '" + declaredSuperType + "'" ) );
- qname.setName( null );
- qname.setNamespace( null );
+ separator = declaredSuperType.lastIndexOf(".");
+ if (separator < 0) {
+ this.results.add(new TypeDeclarationError(typeDescr,
+ "Cannot resolve supertype '" + declaredSuperType + "'"));
+ qname.setName(null);
+ qname.setNamespace(null);
} else {
- qname.setName( declaredSuperType.substring( separator + 1 ) );
- qname.setNamespace( declaredSuperType.substring( 0,
- separator ) );
+ qname.setName(declaredSuperType.substring(separator + 1));
+ qname.setNamespace(declaredSuperType.substring(0,
+ separator));
}
}
}
@@ -1941,45 +1978,45 @@ private void fillSuperType(TypeDeclarationDescr typeDescr,
}
private String typeName2ClassName(String type) {
- Class< ? > cls = getClassForType( type );
+ Class<?> cls = getClassForType(type);
return cls != null ? cls.getName() : type;
}
- private Class< ? > getClassForType(String type) {
- Class< ? > cls = null;
+ private Class<?> getClassForType(String type) {
+ Class<?> cls = null;
String superType = type;
- while ( true ) {
+ while (true) {
try {
- cls = Class.forName( superType, true, this.rootClassLoader );
+ cls = Class.forName(superType, true, this.rootClassLoader);
break;
- } catch ( ClassNotFoundException e ) {
+ } catch (ClassNotFoundException e) {
}
- int separator = superType.lastIndexOf( '.' );
- if ( separator < 0 ) {
+ int separator = superType.lastIndexOf('.');
+ if (separator < 0) {
break;
}
- superType = superType.substring( 0, separator ) + "$" + superType.substring( separator + 1 );
+ superType = superType.substring(0, separator) + "$" + superType.substring(separator + 1);
}
return cls;
}
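    // --- Editor's sketch (not part of the patch): a standalone version of the nested-class
    // fallback used by getClassForType(...) above. A dotted name such as "org.acme.Outer.Inner"
    // is rewritten step by step to "org.acme.Outer$Inner" until Class.forName succeeds.
    //
    //     static Class<?> resolveWithNestedFallback(String name, ClassLoader cl) {
    //         String candidate = name;
    //         while (true) {
    //             try {
    //                 return Class.forName(candidate, true, cl);   // found it
    //             } catch (ClassNotFoundException e) {
    //                 int dot = candidate.lastIndexOf('.');
    //                 if (dot < 0) {
    //                     return null;                             // nothing left to rewrite
    //                 }
    //                 // assume the last segment is a nested class and retry
    //                 candidate = candidate.substring(0, dot) + "$" + candidate.substring(dot + 1);
    //             }
    //         }
    //     }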
private void fillFieldTypes(AbstractClassTypeDeclarationDescr typeDescr,
- PackageDescr packageDescr) {
+ PackageDescr packageDescr) {
- for ( TypeFieldDescr field : typeDescr.getFields().values() ) {
+ for (TypeFieldDescr field : typeDescr.getFields().values()) {
String declaredType = field.getPattern().getObjectType();
- if ( declaredType != null ) {
- int separator = declaredType.lastIndexOf( "." );
+ if (declaredType != null) {
+ int separator = declaredType.lastIndexOf(".");
boolean qualified = separator > 0;
// check if a simple name corresponds to a f.q.n.
- if ( !qualified ) {
+ if (!qualified) {
declaredType =
- resolveType( declaredType,
- packageDescr,
- this.pkgRegistryMap.get( typeDescr.getNamespace() ) );
+ resolveType(declaredType,
+ packageDescr,
+ this.pkgRegistryMap.get(typeDescr.getNamespace()));
- field.getPattern().setObjectType( declaredType );
+ field.getPattern().setObjectType(declaredType);
}
}
}
@@ -2008,40 +2045,41 @@ private void fillFieldTypes(AbstractClassTypeDeclarationDescr typeDescr,
*/
private boolean mergeInheritedFields(TypeDeclarationDescr typeDescr) {
- if ( typeDescr.getSuperTypes().isEmpty() ) return false;
+ if (typeDescr.getSuperTypes().isEmpty())
+ return false;
boolean merge = false;
- for ( int j = typeDescr.getSuperTypes().size() - 1; j >= 0; j-- ) {
- QualifiedName qname = typeDescr.getSuperTypes().get( j );
+ for (int j = typeDescr.getSuperTypes().size() - 1; j >= 0; j--) {
+ QualifiedName qname = typeDescr.getSuperTypes().get(j);
String simpleSuperTypeName = qname.getName();
String superTypePackageName = qname.getNamespace();
String fullSuper = qname.getFullName();
- merge = mergeInheritedFields( simpleSuperTypeName,
- superTypePackageName,
- fullSuper,
- typeDescr ) || merge;
+ merge = mergeInheritedFields(simpleSuperTypeName,
+ superTypePackageName,
+ fullSuper,
+ typeDescr) || merge;
}
return merge;
}
private boolean mergeInheritedFields(String simpleSuperTypeName,
- String superTypePackageName,
- String fullSuper,
- TypeDeclarationDescr typeDescr) {
+ String superTypePackageName,
+ String fullSuper,
+ TypeDeclarationDescr typeDescr) {
Map<String, TypeFieldDescr> fieldMap = new LinkedHashMap<String, TypeFieldDescr>();
- PackageRegistry registry = this.pkgRegistryMap.get( superTypePackageName );
+ PackageRegistry registry = this.pkgRegistryMap.get(superTypePackageName);
Package pack;
- if ( registry != null ) {
+ if (registry != null) {
pack = registry.getPackage();
} else {
            // If there is no registry the type isn't a DRL-declared type, which is forbidden.
// Avoid NPE JIRA-3041 when trying to access the registry. Avoid subsequent problems.
- this.results.add( new TypeDeclarationError( typeDescr, "Cannot extend supertype '" + fullSuper + "' (not a declared type)" ) );
- typeDescr.setType( null, null );
+ this.results.add(new TypeDeclarationError(typeDescr, "Cannot extend supertype '" + fullSuper + "' (not a declared type)"));
+ typeDescr.setType(null, null);
return false;
}
@@ -2049,18 +2087,18 @@ private boolean mergeInheritedFields(String simpleSuperTypeName,
boolean isSuperClassTagged = false;
boolean isSuperClassDeclared = true; //in the same package, or in a previous one
- if ( pack != null ) {
+ if (pack != null) {
// look for the supertype declaration in available packages
- TypeDeclaration superTypeDeclaration = pack.getTypeDeclaration( simpleSuperTypeName );
+ TypeDeclaration superTypeDeclaration = pack.getTypeDeclaration(simpleSuperTypeName);
- if ( superTypeDeclaration != null ) {
+ if (superTypeDeclaration != null) {
ClassDefinition classDef = superTypeDeclaration.getTypeClassDef();
// inherit fields
- for ( FactField fld : classDef.getFields() ) {
- TypeFieldDescr inheritedFlDescr = buildInheritedFieldDescrFromDefinition( fld, typeDescr );
- fieldMap.put( inheritedFlDescr.getFieldName(),
- inheritedFlDescr );
+ for (FactField fld : classDef.getFields()) {
+ TypeFieldDescr inheritedFlDescr = buildInheritedFieldDescrFromDefinition(fld, typeDescr);
+ fieldMap.put(inheritedFlDescr.getFieldName(),
+ inheritedFlDescr);
}
// new classes are already distinguished from tagged external classes
@@ -2074,113 +2112,114 @@ private boolean mergeInheritedFields(String simpleSuperTypeName,
}
// look for the class externally
- if ( !isSuperClassDeclared || isSuperClassTagged ) {
+ if (!isSuperClassDeclared || isSuperClassTagged) {
try {
- Class superKlass = registry.getTypeResolver().resolveType( fullSuper );
- ClassFieldInspector inspector = new ClassFieldInspector( superKlass );
- for ( String name : inspector.getGetterMethods().keySet() ) {
+ Class superKlass = registry.getTypeResolver().resolveType(fullSuper);
+ ClassFieldInspector inspector = new ClassFieldInspector(superKlass);
+ for (String name : inspector.getGetterMethods().keySet()) {
// classFieldAccessor requires both getter and setter
- if ( inspector.getSetterMethods().containsKey( name ) ) {
- if ( !inspector.isNonGetter( name ) && !"class".equals( name ) ) {
+ if (inspector.getSetterMethods().containsKey(name)) {
+ if (!inspector.isNonGetter(name) && !"class".equals(name)) {
TypeFieldDescr inheritedFlDescr = new TypeFieldDescr(
- name,
- new PatternDescr(
- inspector.getFieldTypes().get( name ).getName() ) );
+ name,
+ new PatternDescr(
+ inspector.getFieldTypes().get(name).getName()));
inheritedFlDescr.setInherited(!Modifier.isAbstract(inspector.getGetterMethods().get(name).getModifiers()));
- if ( !fieldMap.containsKey( inheritedFlDescr.getFieldName() ) ) fieldMap.put( inheritedFlDescr.getFieldName(),
- inheritedFlDescr );
+ if (!fieldMap.containsKey(inheritedFlDescr.getFieldName()))
+ fieldMap.put(inheritedFlDescr.getFieldName(),
+ inheritedFlDescr);
}
}
}
- } catch ( ClassNotFoundException cnfe ) {
- throw new RuntimeDroolsException( "Unable to resolve Type Declaration superclass '" + fullSuper + "'" );
- } catch ( IOException e ) {
+ } catch (ClassNotFoundException cnfe) {
+ throw new RuntimeDroolsException("Unable to resolve Type Declaration superclass '" + fullSuper + "'");
+ } catch (IOException e) {
}
}
// finally, locally declared fields are merged. The map swap ensures that super-fields are added in order, before the subclass' ones
        // notice that it is not possible to override a field by changing its type
- for ( String fieldName : typeDescr.getFields().keySet() ) {
- if ( fieldMap.containsKey( fieldName ) ) {
- String type1 = fieldMap.get( fieldName ).getPattern().getObjectType();
- String type2 = typeDescr.getFields().get( fieldName ).getPattern().getObjectType();
- if ( type2.lastIndexOf( "." ) < 0 ) {
+ for (String fieldName : typeDescr.getFields().keySet()) {
+ if (fieldMap.containsKey(fieldName)) {
+ String type1 = fieldMap.get(fieldName).getPattern().getObjectType();
+ String type2 = typeDescr.getFields().get(fieldName).getPattern().getObjectType();
+ if (type2.lastIndexOf(".") < 0) {
try {
- TypeResolver typeResolver = pkgRegistryMap.get( pack.getName() ).getTypeResolver();
- type1 = typeResolver.resolveType( type1 ).getName();
- type2 = typeResolver.resolveType( type2 ).getName();
+ TypeResolver typeResolver = pkgRegistryMap.get(pack.getName()).getTypeResolver();
+ type1 = typeResolver.resolveType(type1).getName();
+ type2 = typeResolver.resolveType(type2).getName();
// now that we are at it... this will be needed later anyway
- fieldMap.get( fieldName ).getPattern().setObjectType( type1 );
- typeDescr.getFields().get( fieldName ).getPattern().setObjectType( type2 );
- } catch ( ClassNotFoundException cnfe ) {
+ fieldMap.get(fieldName).getPattern().setObjectType(type1);
+ typeDescr.getFields().get(fieldName).getPattern().setObjectType(type2);
+ } catch (ClassNotFoundException cnfe) {
// will fail later
}
}
- if ( !type1.equals( type2 ) ) {
- this.results.add( new TypeDeclarationError( typeDescr,
- "Cannot redeclare field '" + fieldName + " from " + type1 + " to " + type2 ) );
- typeDescr.setType( null,
- null );
+ if (!type1.equals(type2)) {
+ this.results.add(new TypeDeclarationError(typeDescr,
+ "Cannot redeclare field '" + fieldName + " from " + type1 + " to " + type2));
+ typeDescr.setType(null,
+ null);
return false;
} else {
- String initVal = fieldMap.get( fieldName ).getInitExpr();
- if ( typeDescr.getFields().get( fieldName ).getInitExpr() == null ) {
- typeDescr.getFields().get( fieldName ).setInitExpr( initVal );
+ String initVal = fieldMap.get(fieldName).getInitExpr();
+ if (typeDescr.getFields().get(fieldName).getInitExpr() == null) {
+ typeDescr.getFields().get(fieldName).setInitExpr(initVal);
}
- typeDescr.getFields().get( fieldName ).setInherited( fieldMap.get( fieldName ).isInherited() );
+ typeDescr.getFields().get(fieldName).setInherited(fieldMap.get(fieldName).isInherited());
- for ( String key : fieldMap.get( fieldName ).getAnnotationNames() ) {
- if ( typeDescr.getFields().get( fieldName ).getAnnotation( key ) == null ) {
- typeDescr.getFields().get( fieldName ).addAnnotation( fieldMap.get( fieldName ).getAnnotation( key ) );
+ for (String key : fieldMap.get(fieldName).getAnnotationNames()) {
+ if (typeDescr.getFields().get(fieldName).getAnnotation(key) == null) {
+ typeDescr.getFields().get(fieldName).addAnnotation(fieldMap.get(fieldName).getAnnotation(key));
}
}
- if ( typeDescr.getFields().get( fieldName ).getIndex() < 0 ) {
- typeDescr.getFields().get( fieldName ).setIndex( fieldMap.get( fieldName ).getIndex() );
+ if (typeDescr.getFields().get(fieldName).getIndex() < 0) {
+ typeDescr.getFields().get(fieldName).setIndex(fieldMap.get(fieldName).getIndex());
}
}
}
- fieldMap.put( fieldName,
- typeDescr.getFields().get( fieldName ) );
+ fieldMap.put(fieldName,
+ typeDescr.getFields().get(fieldName));
}
- typeDescr.setFields( fieldMap );
+ typeDescr.setFields(fieldMap);
return true;
}
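    // --- Editor's sketch (not part of the patch): how the merge above plays out for two
    // hypothetical declared types, written here in DRL for illustration only.
    //
    //     declare Person
    //         name : String
    //     end
    //
    //     declare Student extends Person
    //         school : String
    //     end
    //
    // mergeInheritedFields(...) copies Person's fields into Student's descriptor first, so the
    // resulting field map is ordered [name, school]. Redeclaring an inherited field with a
    // different type is reported as a TypeDeclarationError, while a redeclaration with the same
    // type only picks up the init expression, annotations and index that the local field does
    // not define itself.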
- protected TypeFieldDescr buildInheritedFieldDescrFromDefinition( FactField fld, TypeDeclarationDescr typeDescr ) {
+ protected TypeFieldDescr buildInheritedFieldDescrFromDefinition(FactField fld, TypeDeclarationDescr typeDescr) {
PatternDescr fldType = new PatternDescr();
TypeFieldDescr inheritedFldDescr = new TypeFieldDescr();
- inheritedFldDescr.setFieldName( fld.getName() );
- fldType.setObjectType( ((FieldDefinition) fld).getFieldAccessor().getExtractToClassName() );
- inheritedFldDescr.setPattern( fldType );
- if ( fld.isKey() ) {
- inheritedFldDescr.getAnnotations().put( TypeDeclaration.ATTR_KEY,
- new AnnotationDescr( TypeDeclaration.ATTR_KEY ) );
- }
- inheritedFldDescr.setIndex( ( (FieldDefinition) fld ).getDeclIndex() );
- inheritedFldDescr.setInherited( true );
-
- String initExprOverride = ( (FieldDefinition) fld ).getInitExpr();
- int overrideCount = 0;
- // only @aliasing local fields may override defaults.
- for ( TypeFieldDescr localField : typeDescr.getFields().values() ) {
- AnnotationDescr ann = localField.getAnnotation( "Alias" );
- if ( ann != null && fld.getName().equals( ann.getSingleValue().replaceAll( "\"", "" ) ) && localField.getInitExpr() != null ) {
- overrideCount++;
- initExprOverride = localField.getInitExpr();
- }
- }
- if ( overrideCount > 1 ) {
- // however, only one is allowed
- initExprOverride = null;
- }
- inheritedFldDescr.setInitExpr( initExprOverride );
+ inheritedFldDescr.setFieldName(fld.getName());
+ fldType.setObjectType(((FieldDefinition) fld).getFieldAccessor().getExtractToClassName());
+ inheritedFldDescr.setPattern(fldType);
+ if (fld.isKey()) {
+ inheritedFldDescr.getAnnotations().put(TypeDeclaration.ATTR_KEY,
+ new AnnotationDescr(TypeDeclaration.ATTR_KEY));
+ }
+ inheritedFldDescr.setIndex(((FieldDefinition) fld).getDeclIndex());
+ inheritedFldDescr.setInherited(true);
+
+ String initExprOverride = ((FieldDefinition) fld).getInitExpr();
+ int overrideCount = 0;
+ // only @aliasing local fields may override defaults.
+ for (TypeFieldDescr localField : typeDescr.getFields().values()) {
+ AnnotationDescr ann = localField.getAnnotation("Alias");
+ if (ann != null && fld.getName().equals(ann.getSingleValue().replaceAll("\"", "")) && localField.getInitExpr() != null) {
+ overrideCount++;
+ initExprOverride = localField.getInitExpr();
+ }
+ }
+ if (overrideCount > 1) {
+ // however, only one is allowed
+ initExprOverride = null;
+ }
+ inheritedFldDescr.setInitExpr(initExprOverride);
return inheritedFldDescr;
}
@@ -2188,173 +2227,174 @@ protected TypeFieldDescr buildInheritedFieldDescrFromDefinition( FactField fld,
* @param packageDescr
*/
void processEntryPointDeclarations(PackageRegistry pkgRegistry,
- PackageDescr packageDescr) {
- for ( EntryPointDeclarationDescr epDescr : packageDescr.getEntryPointDeclarations() ) {
- pkgRegistry.getPackage().addEntryPointId( epDescr.getEntryPointId() );
+ PackageDescr packageDescr) {
+ for (EntryPointDeclarationDescr epDescr : packageDescr.getEntryPointDeclarations()) {
+ pkgRegistry.getPackage().addEntryPointId(epDescr.getEntryPointId());
}
}
private void processWindowDeclarations(PackageRegistry pkgRegistry,
- PackageDescr packageDescr) {
- for ( WindowDeclarationDescr wd : packageDescr.getWindowDeclarations() ) {
- WindowDeclaration window = new WindowDeclaration( wd.getName(), packageDescr.getName() );
+ PackageDescr packageDescr) {
+ for (WindowDeclarationDescr wd : packageDescr.getWindowDeclarations()) {
+ WindowDeclaration window = new WindowDeclaration(wd.getName(), packageDescr.getName());
// TODO: process annotations
// process pattern
Package pkg = pkgRegistry.getPackage();
DialectCompiletimeRegistry ctr = pkgRegistry.getDialectCompiletimeRegistry();
- RuleDescr dummy = new RuleDescr( wd.getName() + " Window Declaration" );
- dummy.addAttribute( new AttributeDescr( "dialect", "java" ) );
- RuleBuildContext context = new RuleBuildContext( this,
- dummy,
- ctr,
- pkg,
- ctr.getDialect( pkgRegistry.getDialect() ) );
- final RuleConditionBuilder builder = (RuleConditionBuilder) context.getDialect().getBuilder( wd.getPattern().getClass() );
- if ( builder != null ) {
- final Pattern pattern = (Pattern) builder.build( context,
- wd.getPattern(),
- null );
-
- window.setPattern( pattern );
+ RuleDescr dummy = new RuleDescr(wd.getName() + " Window Declaration");
+ dummy.addAttribute(new AttributeDescr("dialect", "java"));
+ RuleBuildContext context = new RuleBuildContext(this,
+ dummy,
+ ctr,
+ pkg,
+ ctr.getDialect(pkgRegistry.getDialect()));
+ final RuleConditionBuilder builder = (RuleConditionBuilder) context.getDialect().getBuilder(wd.getPattern().getClass());
+ if (builder != null) {
+ final Pattern pattern = (Pattern) builder.build(context,
+ wd.getPattern(),
+ null);
+
+ window.setPattern(pattern);
} else {
throw new RuntimeDroolsException(
- "BUG: builder not found for descriptor class " + wd.getPattern().getClass() );
+ "BUG: builder not found for descriptor class " + wd.getPattern().getClass());
}
- if ( !context.getErrors().isEmpty() ) {
- for ( DroolsError error : context.getErrors() ) {
- this.results.add( error );
+ if (!context.getErrors().isEmpty()) {
+ for (DroolsError error : context.getErrors()) {
+ this.results.add(error);
}
} else {
- pkgRegistry.getPackage().addWindowDeclaration( window );
+ pkgRegistry.getPackage().addWindowDeclaration(window);
}
}
}
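    // --- Editor's sketch (not part of the patch): the kind of DRL window declaration the loop
    // above compiles, shown for illustration only (names are hypothetical).
    //
    //     declare window Ticks
    //         StockTick( source == "NYSE" )
    //             over window:length( 10 )
    //     end
    //
    // The pattern is built with a dummy "<name> Window Declaration" rule context using the java
    // dialect, and the resulting WindowDeclaration is added to the package unless the build
    // context reported errors.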
void registerGeneratedType(AbstractClassTypeDeclarationDescr typeDescr) {
String fullName = typeDescr.getType().getFullName();
- generatedTypes.add( fullName );
+ generatedTypes.add(fullName);
}
/**
* @param packageDescr
*/
List<TypeDefinition> processTypeDeclarations(PackageRegistry pkgRegistry, PackageDescr packageDescr, List<TypeDefinition> unresolvedTypes) {
- for ( AbstractClassTypeDeclarationDescr typeDescr : packageDescr.getClassAndEnumDeclarationDescrs() ) {
-
- String qName = typeDescr.getType().getFullName();
- Class< ? > typeClass = getClassForType( qName );
- if ( typeClass == null ) {
- typeClass = getClassForType( typeDescr.getTypeName() );
- }
- if ( typeClass == null ) {
- for ( ImportDescr id : packageDescr.getImports() ) {
- String imp = id.getTarget();
- int separator = imp.lastIndexOf( '.' );
- String tail = imp.substring( separator + 1 );
- if ( tail.equals( typeDescr.getTypeName() ) ) {
- typeDescr.setNamespace( imp.substring( 0, separator ) );
- typeClass = getClassForType( typeDescr.getType().getFullName() );
- break;
- } else if ( tail.equals("*") ) {
- typeClass = getClassForType( imp.substring(0, imp.length()-1) + typeDescr.getType().getName() );
- if (typeClass != null) {
- typeDescr.setNamespace( imp.substring( 0, separator ) );
+ for (AbstractClassTypeDeclarationDescr typeDescr : packageDescr.getClassAndEnumDeclarationDescrs()) {
+ if (filterAccepts(typeDescr.getNamespace(), typeDescr.getTypeName()) ) {
+
+ String qName = typeDescr.getType().getFullName();
+ Class<?> typeClass = getClassForType(qName);
+ if (typeClass == null) {
+ typeClass = getClassForType(typeDescr.getTypeName());
+ }
+ if (typeClass == null) {
+ for (ImportDescr id : packageDescr.getImports()) {
+ String imp = id.getTarget();
+ int separator = imp.lastIndexOf('.');
+ String tail = imp.substring(separator + 1);
+ if (tail.equals(typeDescr.getTypeName())) {
+ typeDescr.setNamespace(imp.substring(0, separator));
+ typeClass = getClassForType(typeDescr.getType().getFullName());
break;
+ } else if (tail.equals("*")) {
+ typeClass = getClassForType(imp.substring(0, imp.length() - 1) + typeDescr.getType().getName());
+ if (typeClass != null) {
+ typeDescr.setNamespace(imp.substring(0, separator));
+ break;
+ }
}
}
}
- }
- String className = typeClass != null ? typeClass.getName() : qName;
- int dotPos = className.lastIndexOf( '.' );
- if ( dotPos >= 0 ) {
- typeDescr.setNamespace( className.substring( 0, dotPos ) );
- typeDescr.setTypeName( className.substring( dotPos + 1 ) );
- }
+ String className = typeClass != null ? typeClass.getName() : qName;
+ int dotPos = className.lastIndexOf('.');
+ if (dotPos >= 0) {
+ typeDescr.setNamespace(className.substring(0, dotPos));
+ typeDescr.setTypeName(className.substring(dotPos + 1));
+ }
- if ( isEmpty( typeDescr.getNamespace() ) && typeDescr.getFields().isEmpty() ) {
- // might be referencing a class imported with a package import (.*)
- PackageRegistry pkgReg = this.pkgRegistryMap.get( packageDescr.getName() );
- if ( pkgReg != null ) {
- try {
- Class< ? > clz = pkgReg.getTypeResolver().resolveType( typeDescr.getTypeName() );
- java.lang.Package pkg = clz.getPackage();
- if ( pkg != null ) {
- typeDescr.setNamespace( pkg.getName() );
- int index = typeDescr.getNamespace() != null && !typeDescr.getNamespace().isEmpty() ? typeDescr.getNamespace().length() + 1 : 0;
- typeDescr.setTypeName( clz.getCanonicalName().substring( index ) );
+ if (isEmpty(typeDescr.getNamespace()) && typeDescr.getFields().isEmpty()) {
+ // might be referencing a class imported with a package import (.*)
+ PackageRegistry pkgReg = this.pkgRegistryMap.get(packageDescr.getName());
+ if (pkgReg != null) {
+ try {
+ Class<?> clz = pkgReg.getTypeResolver().resolveType(typeDescr.getTypeName());
+ java.lang.Package pkg = clz.getPackage();
+ if (pkg != null) {
+ typeDescr.setNamespace(pkg.getName());
+ int index = typeDescr.getNamespace() != null && !typeDescr.getNamespace().isEmpty() ? typeDescr.getNamespace().length() + 1 : 0;
+ typeDescr.setTypeName(clz.getCanonicalName().substring(index));
+ }
+ } catch (Exception e) {
+ // intentionally eating the exception as we will fallback to default namespace
}
- } catch ( Exception e ) {
- // intentionally eating the exception as we will fallback to default namespace
}
}
- }
-
- if ( isEmpty( typeDescr.getNamespace() ) ) {
- typeDescr.setNamespace( packageDescr.getNamespace() ); // set the default namespace
- }
-
- //identify superclass type and namespace
- if ( typeDescr instanceof TypeDeclarationDescr ) {
- fillSuperType( (TypeDeclarationDescr) typeDescr,
- packageDescr );
- }
-
- //identify field types as well
- fillFieldTypes( typeDescr,
- packageDescr );
- if ( !typeDescr.getNamespace().equals( packageDescr.getNamespace() ) ) {
- // If the type declaration is for a different namespace, process that separately.
- PackageDescr altDescr = new PackageDescr( typeDescr.getNamespace() );
- if ( typeDescr instanceof TypeDeclarationDescr ) {
- altDescr.addTypeDeclaration( (TypeDeclarationDescr) typeDescr );
- } else if ( typeDescr instanceof EnumDeclarationDescr ) {
- altDescr.addEnumDeclaration( (EnumDeclarationDescr) typeDescr );
+ if (isEmpty(typeDescr.getNamespace())) {
+ typeDescr.setNamespace(packageDescr.getNamespace()); // set the default namespace
}
- for ( ImportDescr imp : packageDescr.getImports() ) {
- altDescr.addImport( imp );
+ //identify superclass type and namespace
+ if (typeDescr instanceof TypeDeclarationDescr) {
+ fillSuperType((TypeDeclarationDescr) typeDescr,
+ packageDescr);
}
- if ( !getPackageRegistry().containsKey( altDescr.getNamespace() ) ) {
- newPackage( altDescr );
+
+ //identify field types as well
+ fillFieldTypes(typeDescr,
+ packageDescr);
+
+ if (!typeDescr.getNamespace().equals(packageDescr.getNamespace())) {
+ // If the type declaration is for a different namespace, process that separately.
+ PackageDescr altDescr = new PackageDescr(typeDescr.getNamespace());
+ if (typeDescr instanceof TypeDeclarationDescr) {
+ altDescr.addTypeDeclaration((TypeDeclarationDescr) typeDescr);
+ } else if (typeDescr instanceof EnumDeclarationDescr) {
+ altDescr.addEnumDeclaration((EnumDeclarationDescr) typeDescr);
+ }
+
+ for (ImportDescr imp : packageDescr.getImports()) {
+ altDescr.addImport(imp);
+ }
+ if (!getPackageRegistry().containsKey(altDescr.getNamespace())) {
+ newPackage(altDescr);
+ }
+ mergePackage(this.pkgRegistryMap.get(altDescr.getNamespace()), altDescr);
}
- mergePackage( this.pkgRegistryMap.get( altDescr.getNamespace() ), altDescr );
}
-
}
// sort declarations : superclasses must be generated first
- Collection<AbstractClassTypeDeclarationDescr> sortedTypeDescriptors = sortByHierarchy( packageDescr.getClassAndEnumDeclarationDescrs() );
+ Collection<AbstractClassTypeDeclarationDescr> sortedTypeDescriptors = sortByHierarchy(packageDescr.getClassAndEnumDeclarationDescrs());
- for ( AbstractClassTypeDeclarationDescr typeDescr : sortedTypeDescriptors ) {
- registerGeneratedType( typeDescr );
+ for (AbstractClassTypeDeclarationDescr typeDescr : sortedTypeDescriptors) {
+ registerGeneratedType(typeDescr);
}
- if ( hasErrors() ) {
+ if (hasErrors()) {
return Collections.emptyList();
}
- for ( AbstractClassTypeDeclarationDescr typeDescr : sortedTypeDescriptors ) {
+ for (AbstractClassTypeDeclarationDescr typeDescr : sortedTypeDescriptors) {
- if ( !typeDescr.getNamespace().equals( packageDescr.getNamespace() ) ) {
+ if (!typeDescr.getNamespace().equals(packageDescr.getNamespace())) {
continue;
}
//descriptor needs fields inherited from superclass
- if ( typeDescr instanceof TypeDeclarationDescr ) {
+ if (typeDescr instanceof TypeDeclarationDescr) {
TypeDeclarationDescr tDescr = (TypeDeclarationDescr) typeDescr;
- for ( QualifiedName qname : tDescr.getSuperTypes() ) {
+ for (QualifiedName qname : tDescr.getSuperTypes()) {
//descriptor needs fields inherited from superclass
- if ( mergeInheritedFields( tDescr ) ) {
+ if (mergeInheritedFields(tDescr)) {
//descriptor also needs metadata from superclass
- for ( AbstractClassTypeDeclarationDescr descr : sortedTypeDescriptors ) {
+ for (AbstractClassTypeDeclarationDescr descr : sortedTypeDescriptors) {
// sortedTypeDescriptors are sorted by inheritance order, so we'll always find the superClass (if any) before the subclass
- if ( qname.equals( descr.getType() ) ) {
- typeDescr.getAnnotations().putAll( descr.getAnnotations() );
+ if (qname.equals(descr.getType())) {
+ typeDescr.getAnnotations().putAll(descr.getAnnotations());
break;
- } else if ( typeDescr.getType().equals( descr.getType() ) ) {
+ } else if (typeDescr.getType().equals(descr.getType())) {
break;
}
@@ -2364,66 +2404,66 @@ List<TypeDefinition> processTypeDeclarations(PackageRegistry pkgRegistry, Packag
}
// Go on with the build
- TypeDeclaration type = new TypeDeclaration( typeDescr.getTypeName() );
- if ( typeDescr.getResource() == null ) {
- typeDescr.setResource( resource );
+ TypeDeclaration type = new TypeDeclaration(typeDescr.getTypeName());
+ if (typeDescr.getResource() == null) {
+ typeDescr.setResource(resource);
}
- type.setResource( typeDescr.getResource() );
+ type.setResource(typeDescr.getResource());
TypeDeclaration parent = null;
- if ( !typeDescr.getSuperTypes().isEmpty() ) {
+ if (!typeDescr.getSuperTypes().isEmpty()) {
// parent might have inheritable properties
- PackageRegistry sup = pkgRegistryMap.get( typeDescr.getSuperTypeNamespace() );
- if ( sup != null ) {
- parent = sup.getPackage().getTypeDeclaration( typeDescr.getSuperTypeName() );
- if ( parent == null ) {
- this.results.add( new TypeDeclarationError( typeDescr, "Declared class " + typeDescr.getTypeName() + " can't extend class " + typeDescr.getSuperTypeName() + ", it should be declared" ) );
+ PackageRegistry sup = pkgRegistryMap.get(typeDescr.getSuperTypeNamespace());
+ if (sup != null) {
+ parent = sup.getPackage().getTypeDeclaration(typeDescr.getSuperTypeName());
+ if (parent == null) {
+ this.results.add(new TypeDeclarationError(typeDescr, "Declared class " + typeDescr.getTypeName() + " can't extend class " + typeDescr.getSuperTypeName() + ", it should be declared"));
} else {
- if ( parent.getNature() == TypeDeclaration.Nature.DECLARATION && ruleBase != null ) {
+ if (parent.getNature() == TypeDeclaration.Nature.DECLARATION && ruleBase != null) {
// trying to find a definition
- parent = ruleBase.getPackagesMap().get( typeDescr.getSuperTypeNamespace() ).getTypeDeclaration( typeDescr.getSuperTypeName() );
+ parent = ruleBase.getPackagesMap().get(typeDescr.getSuperTypeNamespace()).getTypeDeclaration(typeDescr.getSuperTypeName());
}
}
}
}
// is it a regular fact or an event?
- AnnotationDescr annotationDescr = typeDescr.getAnnotation( TypeDeclaration.Role.ID );
+ AnnotationDescr annotationDescr = typeDescr.getAnnotation(TypeDeclaration.Role.ID);
String role = (annotationDescr != null) ? annotationDescr.getSingleValue() : null;
- if ( role != null ) {
- type.setRole( TypeDeclaration.Role.parseRole( role ) );
- } else if ( parent != null ) {
- type.setRole( parent.getRole() );
+ if (role != null) {
+ type.setRole(TypeDeclaration.Role.parseRole(role));
+ } else if (parent != null) {
+ type.setRole(parent.getRole());
}
- annotationDescr = typeDescr.getAnnotation( TypeDeclaration.ATTR_TYPESAFE );
+ annotationDescr = typeDescr.getAnnotation(TypeDeclaration.ATTR_TYPESAFE);
String typesafe = (annotationDescr != null) ? annotationDescr.getSingleValue() : null;
- if ( typesafe != null ) {
- type.setTypesafe( Boolean.parseBoolean( typesafe ) );
- } else if ( parent != null ) {
- type.setTypesafe( parent.isTypesafe() );
+ if (typesafe != null) {
+ type.setTypesafe(Boolean.parseBoolean(typesafe));
+ } else if (parent != null) {
+ type.setTypesafe(parent.isTypesafe());
}
// is it a pojo or a template?
- annotationDescr = typeDescr.getAnnotation( TypeDeclaration.Format.ID );
+ annotationDescr = typeDescr.getAnnotation(TypeDeclaration.Format.ID);
String format = (annotationDescr != null) ? annotationDescr.getSingleValue() : null;
- if ( format != null ) {
- type.setFormat( TypeDeclaration.Format.parseFormat( format ) );
+ if (format != null) {
+ type.setFormat(TypeDeclaration.Format.parseFormat(format));
}
// is it a class, a trait or an enum?
- annotationDescr = typeDescr.getAnnotation( TypeDeclaration.Kind.ID );
+ annotationDescr = typeDescr.getAnnotation(TypeDeclaration.Kind.ID);
String kind = (annotationDescr != null) ? annotationDescr.getSingleValue() : null;
- if ( kind != null ) {
- type.setKind( TypeDeclaration.Kind.parseKind( kind ) );
+ if (kind != null) {
+ type.setKind(TypeDeclaration.Kind.parseKind(kind));
}
- if ( typeDescr instanceof EnumDeclarationDescr ) {
- type.setKind( TypeDeclaration.Kind.ENUM );
+ if (typeDescr instanceof EnumDeclarationDescr) {
+ type.setKind(TypeDeclaration.Kind.ENUM);
}
- annotationDescr = typeDescr.getAnnotation( TypeDeclaration.ATTR_CLASS );
+ annotationDescr = typeDescr.getAnnotation(TypeDeclaration.ATTR_CLASS);
String className = (annotationDescr != null) ? annotationDescr.getSingleValue() : null;
- if ( StringUtils.isEmpty( className ) ) {
+ if (StringUtils.isEmpty(className)) {
className = type.getTypeName();
}
@@ -2431,26 +2471,26 @@ List<TypeDefinition> processTypeDeclarations(PackageRegistry pkgRegistry, Packag
// the type declaration is generated in any case (to be used by subclasses, if any)
// the actual class will be generated only if needed
- if ( ! hasErrors() ) {
- generateDeclaredBean( typeDescr,
- type,
- pkgRegistry,
- unresolvedTypes );
-
- Class< ? > clazz = pkgRegistry.getTypeResolver().resolveType( typeDescr.getType().getFullName() );
- type.setTypeClass( clazz );
+ if (!hasErrors()) {
+ generateDeclaredBean(typeDescr,
+ type,
+ pkgRegistry,
+ unresolvedTypes);
+
+ Class<?> clazz = pkgRegistry.getTypeResolver().resolveType(typeDescr.getType().getFullName());
+ type.setTypeClass(clazz);
}
- } catch ( final ClassNotFoundException e ) {
- this.results.add( new TypeDeclarationError( typeDescr,
- "Class '" + className +
- "' not found for type declaration of '" +
- type.getTypeName() + "'" ) );
+ } catch (final ClassNotFoundException e) {
+ this.results.add(new TypeDeclarationError(typeDescr,
+ "Class '" + className +
+ "' not found for type declaration of '" +
+ type.getTypeName() + "'"));
continue;
}
- if ( ! processTypeFields( pkgRegistry, typeDescr, type, true ) ) {
- unresolvedTypes.add( new TypeDefinition( type, typeDescr ) );
+ if (!processTypeFields(pkgRegistry, typeDescr, type, true)) {
+ unresolvedTypes.add(new TypeDefinition(type, typeDescr));
}
}
@@ -2458,138 +2498,138 @@ List<TypeDefinition> processTypeDeclarations(PackageRegistry pkgRegistry, Packag
}
private boolean processTypeFields(PackageRegistry pkgRegistry,
- AbstractClassTypeDeclarationDescr typeDescr,
- TypeDeclaration type,
- boolean firstAttempt) {
- if ( type.getTypeClassDef() != null ) {
+ AbstractClassTypeDeclarationDescr typeDescr,
+ TypeDeclaration type,
+ boolean firstAttempt) {
+ if (type.getTypeClassDef() != null) {
try {
- buildFieldAccessors( type, pkgRegistry );
- } catch ( Throwable e ) {
- if ( !firstAttempt ) {
- this.results.add( new TypeDeclarationError( typeDescr,
- "Error creating field accessors for TypeDeclaration '" + type.getTypeName() +
- "' for type '" +
- type.getTypeName() +
- "'" ) );
+ buildFieldAccessors(type, pkgRegistry);
+ } catch (Throwable e) {
+ if (!firstAttempt) {
+ this.results.add(new TypeDeclarationError(typeDescr,
+ "Error creating field accessors for TypeDeclaration '" + type.getTypeName() +
+ "' for type '" +
+ type.getTypeName() +
+ "'"));
}
return false;
}
}
- AnnotationDescr annotationDescr = typeDescr.getAnnotation( TypeDeclaration.ATTR_TIMESTAMP );
+ AnnotationDescr annotationDescr = typeDescr.getAnnotation(TypeDeclaration.ATTR_TIMESTAMP);
String timestamp = (annotationDescr != null) ? annotationDescr.getSingleValue() : null;
- if ( timestamp != null ) {
- type.setTimestampAttribute( timestamp );
+ if (timestamp != null) {
+ type.setTimestampAttribute(timestamp);
Package pkg = pkgRegistry.getPackage();
- MVELDialect dialect = (MVELDialect) pkgRegistry.getDialectCompiletimeRegistry().getDialect( "mvel" );
+ MVELDialect dialect = (MVELDialect) pkgRegistry.getDialectCompiletimeRegistry().getDialect("mvel");
PackageBuildContext context = new PackageBuildContext();
- context.init( this, pkg, typeDescr, pkgRegistry.getDialectCompiletimeRegistry(), dialect, null );
- if ( !type.isTypesafe() ) {
- context.setTypesafe( false );
+ context.init(this, pkg, typeDescr, pkgRegistry.getDialectCompiletimeRegistry(), dialect, null);
+ if (!type.isTypesafe()) {
+ context.setTypesafe(false);
}
MVELAnalysisResult results = (MVELAnalysisResult)
- context.getDialect().analyzeExpression( context,
- typeDescr,
- timestamp,
- new BoundIdentifiers( Collections.EMPTY_MAP,
- Collections.EMPTY_MAP,
- Collections.EMPTY_MAP,
- type.getTypeClass() ) );
-
- if ( results != null ) {
- InternalReadAccessor reader = pkg.getClassFieldAccessorStore().getMVELReader( ClassUtils.getPackage( type.getTypeClass() ),
- type.getTypeClass().getName(),
- timestamp,
- type.isTypesafe(),
- results.getReturnType() );
-
- MVELDialectRuntimeData data = (MVELDialectRuntimeData) pkg.getDialectRuntimeRegistry().getDialectData( "mvel" );
- data.addCompileable( (MVELCompileable) reader );
- ((MVELCompileable) reader).compile( data );
- type.setTimestampExtractor( reader );
+ context.getDialect().analyzeExpression(context,
+ typeDescr,
+ timestamp,
+ new BoundIdentifiers(Collections.EMPTY_MAP,
+ Collections.EMPTY_MAP,
+ Collections.EMPTY_MAP,
+ type.getTypeClass()));
+
+ if (results != null) {
+ InternalReadAccessor reader = pkg.getClassFieldAccessorStore().getMVELReader(ClassUtils.getPackage(type.getTypeClass()),
+ type.getTypeClass().getName(),
+ timestamp,
+ type.isTypesafe(),
+ results.getReturnType());
+
+ MVELDialectRuntimeData data = (MVELDialectRuntimeData) pkg.getDialectRuntimeRegistry().getDialectData("mvel");
+ data.addCompileable((MVELCompileable) reader);
+ ((MVELCompileable) reader).compile(data);
+ type.setTimestampExtractor(reader);
} else {
- this.results.add( new TypeDeclarationError( typeDescr,
- "Error creating field accessors for timestamp field '" + timestamp +
- "' for type '" +
- type.getTypeName() +
- "'" ) );
+ this.results.add(new TypeDeclarationError(typeDescr,
+ "Error creating field accessors for timestamp field '" + timestamp +
+ "' for type '" +
+ type.getTypeName() +
+ "'"));
}
}
- annotationDescr = typeDescr.getAnnotation( TypeDeclaration.ATTR_DURATION );
+ annotationDescr = typeDescr.getAnnotation(TypeDeclaration.ATTR_DURATION);
String duration = (annotationDescr != null) ? annotationDescr.getSingleValue() : null;
- if ( duration != null ) {
- type.setDurationAttribute( duration );
+ if (duration != null) {
+ type.setDurationAttribute(duration);
Package pkg = pkgRegistry.getPackage();
- MVELDialect dialect = (MVELDialect) pkgRegistry.getDialectCompiletimeRegistry().getDialect( "mvel" );
+ MVELDialect dialect = (MVELDialect) pkgRegistry.getDialectCompiletimeRegistry().getDialect("mvel");
PackageBuildContext context = new PackageBuildContext();
- context.init( this, pkg, typeDescr, pkgRegistry.getDialectCompiletimeRegistry(), dialect, null );
- if ( !type.isTypesafe() ) {
- context.setTypesafe( false );
+ context.init(this, pkg, typeDescr, pkgRegistry.getDialectCompiletimeRegistry(), dialect, null);
+ if (!type.isTypesafe()) {
+ context.setTypesafe(false);
}
MVELAnalysisResult results = (MVELAnalysisResult)
- context.getDialect().analyzeExpression( context,
- typeDescr,
- duration,
- new BoundIdentifiers( Collections.EMPTY_MAP,
- Collections.EMPTY_MAP,
- Collections.EMPTY_MAP,
- type.getTypeClass() ) );
-
- if ( results != null ) {
- InternalReadAccessor reader = pkg.getClassFieldAccessorStore().getMVELReader( ClassUtils.getPackage( type.getTypeClass() ),
- type.getTypeClass().getName(),
- duration,
- type.isTypesafe(),
- results.getReturnType() );
-
- MVELDialectRuntimeData data = (MVELDialectRuntimeData) pkg.getDialectRuntimeRegistry().getDialectData( "mvel" );
- data.addCompileable( (MVELCompileable) reader );
- ((MVELCompileable) reader).compile( data );
- type.setDurationExtractor( reader );
+ context.getDialect().analyzeExpression(context,
+ typeDescr,
+ duration,
+ new BoundIdentifiers(Collections.EMPTY_MAP,
+ Collections.EMPTY_MAP,
+ Collections.EMPTY_MAP,
+ type.getTypeClass()));
+
+ if (results != null) {
+ InternalReadAccessor reader = pkg.getClassFieldAccessorStore().getMVELReader(ClassUtils.getPackage(type.getTypeClass()),
+ type.getTypeClass().getName(),
+ duration,
+ type.isTypesafe(),
+ results.getReturnType());
+
+ MVELDialectRuntimeData data = (MVELDialectRuntimeData) pkg.getDialectRuntimeRegistry().getDialectData("mvel");
+ data.addCompileable((MVELCompileable) reader);
+ ((MVELCompileable) reader).compile(data);
+ type.setDurationExtractor(reader);
} else {
- this.results.add( new TypeDeclarationError( typeDescr,
- "Error processing @duration for TypeDeclaration '" + type.getFullName() +
- "': cannot access the field '" + duration + "'" ) );
+ this.results.add(new TypeDeclarationError(typeDescr,
+ "Error processing @duration for TypeDeclaration '" + type.getFullName() +
+ "': cannot access the field '" + duration + "'"));
}
}
- annotationDescr = typeDescr.getAnnotation( TypeDeclaration.ATTR_EXPIRE );
+ annotationDescr = typeDescr.getAnnotation(TypeDeclaration.ATTR_EXPIRE);
String expiration = (annotationDescr != null) ? annotationDescr.getSingleValue() : null;
- if ( expiration != null ) {
- if ( timeParser == null ) {
+ if (expiration != null) {
+ if (timeParser == null) {
timeParser = new TimeIntervalParser();
}
- type.setExpirationOffset( timeParser.parse( expiration )[0] );
+ type.setExpirationOffset(timeParser.parse(expiration)[0]);
}
- boolean dynamic = typeDescr.getAnnotationNames().contains( TypeDeclaration.ATTR_PROP_CHANGE_SUPPORT );
- type.setDynamic( dynamic );
+ boolean dynamic = typeDescr.getAnnotationNames().contains(TypeDeclaration.ATTR_PROP_CHANGE_SUPPORT);
+ type.setDynamic(dynamic);
- PropertySpecificOption propertySpecificOption = configuration.getOption( PropertySpecificOption.class );
- boolean propertyReactive = propertySpecificOption.isPropSpecific( typeDescr.getAnnotationNames().contains( TypeDeclaration.ATTR_PROP_SPECIFIC ),
- typeDescr.getAnnotationNames().contains( TypeDeclaration.ATTR_NOT_PROP_SPECIFIC ) );
+ PropertySpecificOption propertySpecificOption = configuration.getOption(PropertySpecificOption.class);
+ boolean propertyReactive = propertySpecificOption.isPropSpecific(typeDescr.getAnnotationNames().contains(TypeDeclaration.ATTR_PROP_SPECIFIC),
+ typeDescr.getAnnotationNames().contains(TypeDeclaration.ATTR_NOT_PROP_SPECIFIC));
- setPropertyReactive( typeDescr.getResource(), type, propertyReactive );
+ setPropertyReactive(typeDescr.getResource(), type, propertyReactive);
- if ( type.isValid() ) {
+ if (type.isValid()) {
// prefer definitions where possible
- if ( type.getNature() == TypeDeclaration.Nature.DEFINITION ) {
- pkgRegistry.getPackage().addTypeDeclaration( type );
+ if (type.getNature() == TypeDeclaration.Nature.DEFINITION) {
+ pkgRegistry.getPackage().addTypeDeclaration(type);
} else {
- TypeDeclaration oldType = pkgRegistry.getPackage().getTypeDeclaration( type.getTypeName() );
- if ( oldType == null ) {
- pkgRegistry.getPackage().addTypeDeclaration( type );
+ TypeDeclaration oldType = pkgRegistry.getPackage().getTypeDeclaration(type.getTypeName());
+ if (oldType == null) {
+ pkgRegistry.getPackage().addTypeDeclaration(type);
} else {
- if ( type.getRole() == TypeDeclaration.Role.EVENT ) {
- oldType.setRole( TypeDeclaration.Role.EVENT );
+ if (type.getRole() == TypeDeclaration.Role.EVENT) {
+ oldType.setRole(TypeDeclaration.Role.EVENT);
}
- if ( type.isPropertyReactive() ) {
- oldType.setPropertyReactive( true );
+ if (type.isPropertyReactive()) {
+ oldType.setPropertyReactive(true);
}
}
}
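    // --- Editor's sketch (not part of the patch): a hypothetical event declaration showing the
    // metadata handled above; @timestamp and @duration are compiled as MVEL expressions against
    // the fact class, and @expires is parsed with the TimeIntervalParser.
    //
    //     declare StockTick
    //         @role( event )
    //         @timestamp( tradeTime )
    //         @duration( processingTime )
    //         @expires( 10m )
    //         tradeTime      : long
    //         processingTime : long
    //         symbol         : String
    //     end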
@@ -2599,47 +2639,47 @@ private boolean processTypeFields(PackageRegistry pkgRegistry,
}
private void setPropertyReactive(Resource resource,
- TypeDeclaration type,
- boolean propertyReactive) {
- if ( propertyReactive && type.getSettableProperties().size() >= 64 ) {
- this.results.add( new DisabledPropertyReactiveWarning( resource, type.getTypeName() ) );
- type.setPropertyReactive( false );
+ TypeDeclaration type,
+ boolean propertyReactive) {
+ if (propertyReactive && type.getSettableProperties().size() >= 64) {
+ this.results.add(new DisabledPropertyReactiveWarning(resource, type.getTypeName()));
+ type.setPropertyReactive(false);
} else {
- type.setPropertyReactive( propertyReactive );
+ type.setPropertyReactive(propertyReactive);
}
}
private void updateTraitDefinition(TypeDeclaration type,
- Class concrete) {
+ Class concrete) {
try {
- ClassFieldInspector inspector = new ClassFieldInspector( concrete );
+ ClassFieldInspector inspector = new ClassFieldInspector(concrete);
Map<String, Method> methods = inspector.getGetterMethods();
Map<String, Method> setters = inspector.getSetterMethods();
int j = 0;
- for ( String fieldName : methods.keySet() ) {
- if ( "core".equals( fieldName ) || "fields".equals( fieldName ) ) {
+ for (String fieldName : methods.keySet()) {
+ if ("core".equals(fieldName) || "fields".equals(fieldName)) {
continue;
}
- if ( !inspector.isNonGetter( fieldName ) && setters.keySet().contains( fieldName ) ) {
+ if (!inspector.isNonGetter(fieldName) && setters.keySet().contains(fieldName)) {
- Class ret = methods.get( fieldName ).getReturnType();
+ Class ret = methods.get(fieldName).getReturnType();
FieldDefinition field = new FieldDefinition();
- field.setName( fieldName );
- field.setTypeName( ret.getName() );
- field.setIndex( j++ );
- type.getTypeClassDef().addField( field );
+ field.setName(fieldName);
+ field.setTypeName(ret.getName());
+ field.setIndex(j++);
+ type.getTypeClassDef().addField(field);
}
}
Set<String> interfaces = new HashSet<String>();
- Collections.addAll( interfaces, type.getTypeClassDef().getInterfaces() );
- for ( Class iKlass : concrete.getInterfaces() ) {
- interfaces.add( iKlass.getName() );
+ Collections.addAll(interfaces, type.getTypeClassDef().getInterfaces());
+ for (Class iKlass : concrete.getInterfaces()) {
+ interfaces.add(iKlass.getName());
}
- type.getTypeClassDef().setInterfaces( interfaces.toArray( new String[interfaces.size()] ) );
+ type.getTypeClassDef().setInterfaces(interfaces.toArray(new String[interfaces.size()]));
- } catch ( IOException e ) {
+ } catch (IOException e) {
e.printStackTrace();
}
@@ -2653,18 +2693,18 @@ private void updateTraitDefinition(TypeDeclaration type,
* @return
*/
private boolean isNovelClass(AbstractClassTypeDeclarationDescr typeDescr) {
- return getExistingDeclarationClass( typeDescr ) == null;
+ return getExistingDeclarationClass(typeDescr) == null;
}
- private Class< ? > getExistingDeclarationClass(AbstractClassTypeDeclarationDescr typeDescr) {
- PackageRegistry reg = this.pkgRegistryMap.get( typeDescr.getNamespace() );
- if ( reg == null ) {
+ private Class<?> getExistingDeclarationClass(AbstractClassTypeDeclarationDescr typeDescr) {
+ PackageRegistry reg = this.pkgRegistryMap.get(typeDescr.getNamespace());
+ if (reg == null) {
return null;
}
String availableName = typeDescr.getType().getFullName();
try {
- return reg.getTypeResolver().resolveType( availableName );
- } catch ( ClassNotFoundException e ) {
+ return reg.getTypeResolver().resolveType(availableName);
+ } catch (ClassNotFoundException e) {
return null;
}
}
@@ -2680,30 +2720,30 @@ private boolean isNovelClass(AbstractClassTypeDeclarationDescr typeDescr) {
* @return
*/
private Class resolveAnnotation(String annotation,
- TypeResolver resolver) {
+ TypeResolver resolver) {
// do not waste time with @format
- if ( TypeDeclaration.Format.ID.equals( annotation ) ) {
+ if (TypeDeclaration.Format.ID.equals(annotation)) {
return null;
}
// known conflicting annotation
- if ( TypeDeclaration.ATTR_CLASS.equals( annotation ) ) {
+ if (TypeDeclaration.ATTR_CLASS.equals(annotation)) {
return null;
}
try {
- return resolver.resolveType( annotation.indexOf( '.' ) < 0 ?
- annotation.substring( 0, 1 ).toUpperCase() + annotation.substring( 1 ) :
- annotation );
- } catch ( ClassNotFoundException e ) {
+ return resolver.resolveType(annotation.indexOf('.') < 0 ?
+ annotation.substring(0, 1).toUpperCase() + annotation.substring(1) :
+ annotation);
+ } catch (ClassNotFoundException e) {
// internal annotation, or annotation which can't be resolved.
- if ( TypeDeclaration.Role.ID.equals( annotation ) ) {
+ if (TypeDeclaration.Role.ID.equals(annotation)) {
return Role.class;
}
- if ( "key".equals( annotation ) ) {
+ if ("key".equals(annotation)) {
return Key.class;
}
- if ( "position".equals( annotation ) ) {
+ if ("position".equals(annotation)) {
return Position.class;
}
return null;
@@ -2725,22 +2765,22 @@ private Class resolveAnnotation(String annotation,
* @throws NoSuchFieldException
*/
private void buildFieldAccessors(final TypeDeclaration type,
- final PackageRegistry pkgRegistry) throws SecurityException,
- IllegalArgumentException,
- InstantiationException,
- IllegalAccessException,
- IOException,
- IntrospectionException,
- ClassNotFoundException,
- NoSuchMethodException,
- InvocationTargetException,
- NoSuchFieldException {
+ final PackageRegistry pkgRegistry) throws SecurityException,
+ IllegalArgumentException,
+ InstantiationException,
+ IllegalAccessException,
+ IOException,
+ IntrospectionException,
+ ClassNotFoundException,
+ NoSuchMethodException,
+ InvocationTargetException,
+ NoSuchFieldException {
ClassDefinition cd = type.getTypeClassDef();
ClassFieldAccessorStore store = pkgRegistry.getPackage().getClassFieldAccessorStore();
- for ( FieldDefinition attrDef : cd.getFieldsDefinitions() ) {
- ClassFieldAccessor accessor = store.getAccessor( cd.getDefinedClass().getName(),
- attrDef.getName() );
- attrDef.setReadWriteAccessor( accessor );
+ for (FieldDefinition attrDef : cd.getFieldsDefinitions()) {
+ ClassFieldAccessor accessor = store.getAccessor(cd.getDefinedClass().getName(),
+ attrDef.getName());
+ attrDef.setReadWriteAccessor(accessor);
}
}
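    // --- Editor's sketch (not part of the patch): what a read/write accessor for a bean
    // property boils down to; the real ClassFieldAccessor instances are produced by the
    // ClassFieldAccessorStore used above, and this is just the plain-reflection equivalent.
    //
    //     static Object read(Object bean, String property) throws Exception {
    //         String getter = "get" + Character.toUpperCase(property.charAt(0)) + property.substring(1);
    //         return bean.getClass().getMethod(getter).invoke(bean);
    //     }
    //
    //     static void write(Object bean, String property, Object value, Class<?> type) throws Exception {
    //         String setter = "set" + Character.toUpperCase(property.charAt(0)) + property.substring(1);
    //         bean.getClass().getMethod(setter, type).invoke(bean, value);
    //     }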
@@ -2749,201 +2789,201 @@ private void buildFieldAccessors(final TypeDeclaration type,
* everything is using.
*/
private void generateDeclaredBean(AbstractClassTypeDeclarationDescr typeDescr,
- TypeDeclaration type,
- PackageRegistry pkgRegistry,
- List<TypeDefinition> unresolvedTypeDefinitions) {
+ TypeDeclaration type,
+ PackageRegistry pkgRegistry,
+ List<TypeDefinition> unresolvedTypeDefinitions) {
// extracts type, supertype and interfaces
String fullName = typeDescr.getType().getFullName();
- if ( type.getKind().equals( TypeDeclaration.Kind.CLASS ) ) {
+ if (type.getKind().equals(TypeDeclaration.Kind.CLASS)) {
TypeDeclarationDescr tdescr = (TypeDeclarationDescr) typeDescr;
- if ( tdescr.getSuperTypes().size() > 1 ) {
- this.results.add( new TypeDeclarationError( typeDescr, "Declared class " + fullName + " - has more than one supertype;" ) );
+ if (tdescr.getSuperTypes().size() > 1) {
+ this.results.add(new TypeDeclarationError(typeDescr, "Declared class " + fullName + " - has more than one supertype;"));
return;
- } else if ( tdescr.getSuperTypes().isEmpty() ) {
- tdescr.addSuperType( "java.lang.Object" );
+ } else if (tdescr.getSuperTypes().isEmpty()) {
+ tdescr.addSuperType("java.lang.Object");
}
}
- AnnotationDescr traitableAnn = typeDescr.getAnnotation( Traitable.class.getSimpleName() );
+ AnnotationDescr traitableAnn = typeDescr.getAnnotation(Traitable.class.getSimpleName());
boolean traitable = traitableAnn != null;
String[] fullSuperTypes = new String[typeDescr.getSuperTypes().size() + 1];
int j = 0;
- for ( QualifiedName qname : typeDescr.getSuperTypes() ) {
+ for (QualifiedName qname : typeDescr.getSuperTypes()) {
fullSuperTypes[j++] = qname.getFullName();
}
fullSuperTypes[j] = Thing.class.getName();
List<String> interfaceList = new ArrayList<String>();
- interfaceList.add( traitable ? Externalizable.class.getName() : Serializable.class.getName() );
- if ( traitable ) {
- interfaceList.add( TraitableBean.class.getName() );
+ interfaceList.add(traitable ? Externalizable.class.getName() : Serializable.class.getName());
+ if (traitable) {
+ interfaceList.add(TraitableBean.class.getName());
}
- String[] interfaces = interfaceList.toArray( new String[interfaceList.size()] );
+ String[] interfaces = interfaceList.toArray(new String[interfaceList.size()]);
// prepares a class definition
ClassDefinition def;
- switch ( type.getKind() ) {
- case TRAIT :
- def = new ClassDefinition( fullName,
- "java.lang.Object",
- fullSuperTypes );
+ switch (type.getKind()) {
+ case TRAIT:
+ def = new ClassDefinition(fullName,
+ "java.lang.Object",
+ fullSuperTypes);
break;
- case ENUM :
- def = new EnumClassDefinition( fullName,
- fullSuperTypes[0],
- null );
+ case ENUM:
+ def = new EnumClassDefinition(fullName,
+ fullSuperTypes[0],
+ null);
break;
- case CLASS :
- default :
- def = new ClassDefinition( fullName,
- fullSuperTypes[0],
- interfaces );
- def.setTraitable( traitable, traitableAnn != null &&
- traitableAnn.getValue( "logical" ) != null &&
- Boolean.valueOf( traitableAnn.getValue( "logical" ) ) );
- }
-
- for ( String annotationName : typeDescr.getAnnotationNames() ) {
- Class annotation = resolveAnnotation( annotationName,
- pkgRegistry.getTypeResolver() );
- if ( annotation != null ) {
+ case CLASS:
+ default:
+ def = new ClassDefinition(fullName,
+ fullSuperTypes[0],
+ interfaces);
+ def.setTraitable(traitable, traitableAnn != null &&
+ traitableAnn.getValue("logical") != null &&
+ Boolean.valueOf(traitableAnn.getValue("logical")));
+ }
+
+ for (String annotationName : typeDescr.getAnnotationNames()) {
+ Class annotation = resolveAnnotation(annotationName,
+ pkgRegistry.getTypeResolver());
+ if (annotation != null) {
try {
- AnnotationDefinition annotationDefinition = AnnotationDefinition.build( annotation,
- typeDescr.getAnnotations().get( annotationName ).getValueMap(),
- pkgRegistry.getTypeResolver() );
- def.addAnnotation( annotationDefinition );
- } catch ( NoSuchMethodException nsme ) {
- this.results.add( new TypeDeclarationError( typeDescr,
- "Annotated type " + fullName +
- " - undefined property in @annotation " +
- annotationName + ": " +
- nsme.getMessage() + ";" ) );
+ AnnotationDefinition annotationDefinition = AnnotationDefinition.build(annotation,
+ typeDescr.getAnnotations().get(annotationName).getValueMap(),
+ pkgRegistry.getTypeResolver());
+ def.addAnnotation(annotationDefinition);
+ } catch (NoSuchMethodException nsme) {
+ this.results.add(new TypeDeclarationError(typeDescr,
+ "Annotated type " + fullName +
+ " - undefined property in @annotation " +
+ annotationName + ": " +
+ nsme.getMessage() + ";"));
}
}
if (annotation == null || annotation == Role.class) {
- def.addMetaData( annotationName, typeDescr.getAnnotation( annotationName ).getSingleValue() );
+ def.addMetaData(annotationName, typeDescr.getAnnotation(annotationName).getSingleValue());
}
}
// add enum literals, if appropriate
- if ( type.getKind() == TypeDeclaration.Kind.ENUM ) {
- for ( EnumLiteralDescr lit : ((EnumDeclarationDescr) typeDescr).getLiterals() ) {
+ if (type.getKind() == TypeDeclaration.Kind.ENUM) {
+ for (EnumLiteralDescr lit : ((EnumDeclarationDescr) typeDescr).getLiterals()) {
((EnumClassDefinition) def).addLiteral(
- new EnumLiteralDefinition( lit.getName(), lit.getConstructorArgs() )
+ new EnumLiteralDefinition(lit.getName(), lit.getConstructorArgs())
);
}
}
        // field definitions are created; they will be used by subclasses, if any.
// Fields are SORTED in the process
- if ( !typeDescr.getFields().isEmpty() ) {
- PriorityQueue<FieldDefinition> fieldDefs = sortFields( typeDescr.getFields(),
- pkgRegistry );
+ if (!typeDescr.getFields().isEmpty()) {
+ PriorityQueue<FieldDefinition> fieldDefs = sortFields(typeDescr.getFields(),
+ pkgRegistry);
int n = fieldDefs.size();
- for ( int k = 0; k < n; k++ ) {
+ for (int k = 0; k < n; k++) {
FieldDefinition fld = fieldDefs.poll();
- if ( unresolvedTypeDefinitions != null ) {
- for ( TypeDefinition typeDef : unresolvedTypeDefinitions ) {
- if ( fld.getTypeName().equals( typeDef.getTypeClassName() ) ) {
- fld.setRecursive( true );
+ if (unresolvedTypeDefinitions != null) {
+ for (TypeDefinition typeDef : unresolvedTypeDefinitions) {
+ if (fld.getTypeName().equals(typeDef.getTypeClassName())) {
+ fld.setRecursive(true);
break;
}
}
}
- fld.setIndex( k );
- def.addField( fld );
+ fld.setIndex(k);
+ def.addField(fld);
}
}
// check whether it is necessary to build the class or not
- Class< ? > existingDeclarationClass = getExistingDeclarationClass( typeDescr );
- type.setNovel( existingDeclarationClass == null );
+ Class<?> existingDeclarationClass = getExistingDeclarationClass(typeDescr);
+ type.setNovel(existingDeclarationClass == null);
// attach the class definition, it will be completed later
- type.setTypeClassDef( def );
+ type.setTypeClassDef(def);
        //if it is not new, search the already existing declaration and
        //compare them to see if they are at least compatible
- if ( !type.isNovel() ) {
- TypeDeclaration previousTypeDeclaration = this.pkgRegistryMap.get( typeDescr.getNamespace() ).getPackage().getTypeDeclaration( typeDescr.getTypeName() );
+ if (!type.isNovel()) {
+ TypeDeclaration previousTypeDeclaration = this.pkgRegistryMap.get(typeDescr.getNamespace()).getPackage().getTypeDeclaration(typeDescr.getTypeName());
try {
- if ( !type.getTypeClassDef().getFields().isEmpty() ) {
+ if (!type.getTypeClassDef().getFields().isEmpty()) {
//since the declaration defines one or more fields, it is a DEFINITION
- type.setNature( TypeDeclaration.Nature.DEFINITION );
+ type.setNature(TypeDeclaration.Nature.DEFINITION);
} else {
//The declaration doesn't define any field, it is a DECLARATION
- type.setNature( TypeDeclaration.Nature.DECLARATION );
+ type.setNature(TypeDeclaration.Nature.DECLARATION);
}
//if there is no previous declaration, then the original declaration was a POJO
                //according to the behavior prior to these changes
- if ( previousTypeDeclaration == null ) {
+ if (previousTypeDeclaration == null) {
// new declarations of a POJO can't declare new fields,
// except if the POJO was previously generated/compiled and saved into the kjar
- if ( !configuration.isPreCompiled() &&
- !GeneratedFact.class.isAssignableFrom( existingDeclarationClass ) && !type.getTypeClassDef().getFields().isEmpty() ) {
- type.setValid( false );
- this.results.add( new TypeDeclarationError( typeDescr, "New declaration of " + typeDescr.getType().getFullName()
- + " can't declare new fields" ) );
+ if (!configuration.isPreCompiled() &&
+ !GeneratedFact.class.isAssignableFrom(existingDeclarationClass) && !type.getTypeClassDef().getFields().isEmpty()) {
+ type.setValid(false);
+ this.results.add(new TypeDeclarationError(typeDescr, "New declaration of " + typeDescr.getType().getFullName()
+ + " can't declare new fields"));
}
} else {
- int typeComparisonResult = this.compareTypeDeclarations( previousTypeDeclaration, type );
+ int typeComparisonResult = this.compareTypeDeclarations(previousTypeDeclaration, type);
- if ( typeComparisonResult < 0 ) {
+ if (typeComparisonResult < 0) {
//oldDeclaration is "less" than newDeclaration -> error
- this.results.add( new TypeDeclarationError( typeDescr, typeDescr.getType().getFullName()
- + " declares more fields than the already existing version" ) );
- type.setValid( false );
- } else if ( typeComparisonResult > 0 && !type.getTypeClassDef().getFields().isEmpty() ) {
+ this.results.add(new TypeDeclarationError(typeDescr, typeDescr.getType().getFullName()
+ + " declares more fields than the already existing version"));
+ type.setValid(false);
+ } else if (typeComparisonResult > 0 && !type.getTypeClassDef().getFields().isEmpty()) {
                    //oldDeclaration is "greater" than newDeclaration -> error
- this.results.add( new TypeDeclarationError( typeDescr, typeDescr.getType().getFullName()
- + " declares less fields than the already existing version" ) );
- type.setValid( false );
+ this.results.add(new TypeDeclarationError(typeDescr, typeDescr.getType().getFullName()
+ + " declares less fields than the already existing version"));
+ type.setValid(false);
}
//if they are "equal" -> no problem
// in the case of a declaration, we need to copy all the
// fields present in the previous declaration
- if ( type.getNature() == TypeDeclaration.Nature.DECLARATION ) {
- this.mergeTypeDeclarations( previousTypeDeclaration, type );
+ if (type.getNature() == TypeDeclaration.Nature.DECLARATION) {
+ this.mergeTypeDeclarations(previousTypeDeclaration, type);
}
}
- } catch ( IncompatibleClassChangeError error ) {
+ } catch (IncompatibleClassChangeError error) {
//if the types are incompatible -> error
- this.results.add( new TypeDeclarationError( typeDescr, error.getMessage() ) );
+ this.results.add(new TypeDeclarationError(typeDescr, error.getMessage()));
}
} else {
//if the declaration is novel, then it is a DEFINITION
- type.setNature( TypeDeclaration.Nature.DEFINITION );
+ type.setNature(TypeDeclaration.Nature.DEFINITION);
}
- generateDeclaredBean( typeDescr,
- type,
- pkgRegistry,
- expandImportsInFieldInitExpr( def, pkgRegistry ) );
+ generateDeclaredBean(typeDescr,
+ type,
+ pkgRegistry,
+ expandImportsInFieldInitExpr(def, pkgRegistry));
}
private ClassDefinition expandImportsInFieldInitExpr(ClassDefinition def,
- PackageRegistry pkgRegistry) {
+ PackageRegistry pkgRegistry) {
TypeResolver typeResolver = pkgRegistry.getPackage().getTypeResolver();
- for ( FieldDefinition field : def.getFieldsDefinitions() ) {
- field.setInitExpr( rewriteInitExprWithImports( field.getInitExpr(), typeResolver ) );
+ for (FieldDefinition field : def.getFieldsDefinitions()) {
+ field.setInitExpr(rewriteInitExprWithImports(field.getInitExpr(), typeResolver));
}
return def;
}
private String rewriteInitExprWithImports(String expr,
- TypeResolver typeResolver) {
- if ( expr == null ) {
+ TypeResolver typeResolver) {
+ if (expr == null) {
return null;
}
StringBuilder sb = new StringBuilder();
@@ -2951,178 +2991,178 @@ private String rewriteInitExprWithImports(String expr,
boolean inTypeName = false;
boolean afterDot = false;
int typeStart = 0;
- for ( int i = 0; i < expr.length(); i++ ) {
- char ch = expr.charAt( i );
- if ( Character.isJavaIdentifierStart( ch ) ) {
- if ( !inTypeName && !inQuotes && !afterDot ) {
+ for (int i = 0; i < expr.length(); i++) {
+ char ch = expr.charAt(i);
+ if (Character.isJavaIdentifierStart(ch)) {
+ if (!inTypeName && !inQuotes && !afterDot) {
typeStart = i;
inTypeName = true;
}
- } else if ( !Character.isJavaIdentifierPart( ch ) ) {
- if ( ch == '"' ) {
+ } else if (!Character.isJavaIdentifierPart(ch)) {
+ if (ch == '"') {
inQuotes = !inQuotes;
- } else if ( ch == '.' && !inQuotes ) {
+ } else if (ch == '.' && !inQuotes) {
afterDot = true;
- } else if ( !Character.isSpaceChar( ch ) ) {
+ } else if (!Character.isSpaceChar(ch)) {
afterDot = false;
}
- if ( inTypeName ) {
+ if (inTypeName) {
inTypeName = false;
- String type = expr.substring( typeStart, i );
- sb.append( getFullTypeName( type, typeResolver ) );
+ String type = expr.substring(typeStart, i);
+ sb.append(getFullTypeName(type, typeResolver));
}
}
- if ( !inTypeName ) {
- sb.append( ch );
+ if (!inTypeName) {
+ sb.append(ch);
}
}
- if ( inTypeName ) {
- String type = expr.substring( typeStart );
- sb.append( getFullTypeName( type, typeResolver ) );
+ if (inTypeName) {
+ String type = expr.substring(typeStart);
+ sb.append(getFullTypeName(type, typeResolver));
}
return sb.toString();
}
private String getFullTypeName(String type,
- TypeResolver typeResolver) {
- if ( type.equals( "new" ) ) {
+ TypeResolver typeResolver) {
+ if (type.equals("new")) {
return type;
}
try {
- return typeResolver.getFullTypeName( type );
- } catch ( ClassNotFoundException e ) {
+ return typeResolver.getFullTypeName(type);
+ } catch (ClassNotFoundException e) {
return type;
}
}
private void generateDeclaredBean(AbstractClassTypeDeclarationDescr typeDescr,
- TypeDeclaration type,
- PackageRegistry pkgRegistry,
- ClassDefinition def) {
-
- if ( typeDescr.getAnnotation( Traitable.class.getSimpleName() ) != null
- || (!type.getKind().equals( TypeDeclaration.Kind.TRAIT ) &&
- pkgRegistryMap.containsKey( def.getSuperClass() ) &&
- pkgRegistryMap.get( def.getSuperClass() ).getTraitRegistry().getTraitables().containsKey( def.getSuperClass() )
- ) ) {
- if ( !isNovelClass( typeDescr ) ) {
+ TypeDeclaration type,
+ PackageRegistry pkgRegistry,
+ ClassDefinition def) {
+
+ if (typeDescr.getAnnotation(Traitable.class.getSimpleName()) != null
+ || (!type.getKind().equals(TypeDeclaration.Kind.TRAIT) &&
+ pkgRegistryMap.containsKey(def.getSuperClass()) &&
+ pkgRegistryMap.get(def.getSuperClass()).getTraitRegistry().getTraitables().containsKey(def.getSuperClass())
+ )) {
+ if (!isNovelClass(typeDescr)) {
try {
- PackageRegistry reg = this.pkgRegistryMap.get( typeDescr.getNamespace() );
+ PackageRegistry reg = this.pkgRegistryMap.get(typeDescr.getNamespace());
String availableName = typeDescr.getType().getFullName();
- Class< ? > resolvedType = reg.getTypeResolver().resolveType( availableName );
- updateTraitDefinition( type,
- resolvedType );
- } catch ( ClassNotFoundException cnfe ) {
+ Class<?> resolvedType = reg.getTypeResolver().resolveType(availableName);
+ updateTraitDefinition(type,
+ resolvedType);
+ } catch (ClassNotFoundException cnfe) {
// we already know the class exists
}
}
- pkgRegistry.getTraitRegistry().addTraitable( def );
- } else if ( type.getKind().equals( TypeDeclaration.Kind.TRAIT )
- || typeDescr.getAnnotation( Trait.class.getSimpleName() ) != null ) {
+ pkgRegistry.getTraitRegistry().addTraitable(def);
+ } else if (type.getKind().equals(TypeDeclaration.Kind.TRAIT)
+ || typeDescr.getAnnotation(Trait.class.getSimpleName()) != null) {
- if ( !type.isNovel() ) {
+ if (!type.isNovel()) {
try {
- PackageRegistry reg = this.pkgRegistryMap.get( typeDescr.getNamespace() );
+ PackageRegistry reg = this.pkgRegistryMap.get(typeDescr.getNamespace());
String availableName = typeDescr.getType().getFullName();
- Class< ? > resolvedType = reg.getTypeResolver().resolveType( availableName );
- if ( !Thing.class.isAssignableFrom( resolvedType ) ) {
- updateTraitDefinition( type,
- resolvedType );
+ Class<?> resolvedType = reg.getTypeResolver().resolveType(availableName);
+ if (!Thing.class.isAssignableFrom(resolvedType)) {
+ updateTraitDefinition(type,
+ resolvedType);
String target = typeDescr.getTypeName() + TraitFactory.SUFFIX;
TypeDeclarationDescr tempDescr = new TypeDeclarationDescr();
- tempDescr.setNamespace( typeDescr.getNamespace() );
- tempDescr.setFields( typeDescr.getFields() );
- tempDescr.setType( target,
- typeDescr.getNamespace() );
- tempDescr.addSuperType( typeDescr.getType() );
- TypeDeclaration tempDeclr = new TypeDeclaration( target );
- tempDeclr.setKind( TypeDeclaration.Kind.TRAIT );
- tempDeclr.setTypesafe( type.isTypesafe() );
- tempDeclr.setNovel( true );
- tempDeclr.setTypeClassName( tempDescr.getType().getFullName() );
- tempDeclr.setResource( type.getResource() );
-
- ClassDefinition tempDef = new ClassDefinition( target );
- tempDef.setClassName( tempDescr.getType().getFullName() );
- tempDef.setTraitable( false );
- for ( FieldDefinition fld : def.getFieldsDefinitions() ) {
- tempDef.addField( fld );
+ tempDescr.setNamespace(typeDescr.getNamespace());
+ tempDescr.setFields(typeDescr.getFields());
+ tempDescr.setType(target,
+ typeDescr.getNamespace());
+ tempDescr.addSuperType(typeDescr.getType());
+ TypeDeclaration tempDeclr = new TypeDeclaration(target);
+ tempDeclr.setKind(TypeDeclaration.Kind.TRAIT);
+ tempDeclr.setTypesafe(type.isTypesafe());
+ tempDeclr.setNovel(true);
+ tempDeclr.setTypeClassName(tempDescr.getType().getFullName());
+ tempDeclr.setResource(type.getResource());
+
+ ClassDefinition tempDef = new ClassDefinition(target);
+ tempDef.setClassName(tempDescr.getType().getFullName());
+ tempDef.setTraitable(false);
+ for (FieldDefinition fld : def.getFieldsDefinitions()) {
+ tempDef.addField(fld);
}
- tempDef.setInterfaces( def.getInterfaces() );
- tempDef.setSuperClass( def.getClassName() );
- tempDef.setDefinedClass( resolvedType );
- tempDef.setAbstrakt( true );
- tempDeclr.setTypeClassDef( tempDef );
-
- type.setKind( TypeDeclaration.Kind.CLASS );
-
- generateDeclaredBean( tempDescr,
- tempDeclr,
- pkgRegistry,
- tempDef );
+ tempDef.setInterfaces(def.getInterfaces());
+ tempDef.setSuperClass(def.getClassName());
+ tempDef.setDefinedClass(resolvedType);
+ tempDef.setAbstrakt(true);
+ tempDeclr.setTypeClassDef(tempDef);
+
+ type.setKind(TypeDeclaration.Kind.CLASS);
+
+ generateDeclaredBean(tempDescr,
+ tempDeclr,
+ pkgRegistry,
+ tempDef);
try {
- Class< ? > clazz = pkgRegistry.getTypeResolver().resolveType( tempDescr.getType().getFullName() );
- tempDeclr.setTypeClass( clazz );
- } catch ( ClassNotFoundException cnfe ) {
- this.results.add( new TypeDeclarationError( typeDescr,
- "Internal Trait extension Class '" + target +
- "' could not be generated correctly'" ) );
+ Class<?> clazz = pkgRegistry.getTypeResolver().resolveType(tempDescr.getType().getFullName());
+ tempDeclr.setTypeClass(clazz);
+ } catch (ClassNotFoundException cnfe) {
+ this.results.add(new TypeDeclarationError(typeDescr,
+ "Internal Trait extension Class '" + target +
+                        "' could not be generated correctly"));
} finally {
- pkgRegistry.getPackage().addTypeDeclaration( tempDeclr );
+ pkgRegistry.getPackage().addTypeDeclaration(tempDeclr);
}
} else {
- updateTraitDefinition( type,
- resolvedType );
- pkgRegistry.getTraitRegistry().addTrait( def );
+ updateTraitDefinition(type,
+ resolvedType);
+ pkgRegistry.getTraitRegistry().addTrait(def);
}
- } catch ( ClassNotFoundException cnfe ) {
+ } catch (ClassNotFoundException cnfe) {
// we already know the class exists
}
} else {
- if ( def.getClassName().endsWith( TraitFactory.SUFFIX ) ) {
- pkgRegistry.getTraitRegistry().addTrait( def.getClassName().replace( TraitFactory.SUFFIX,
- "" ),
- def );
+ if (def.getClassName().endsWith(TraitFactory.SUFFIX)) {
+ pkgRegistry.getTraitRegistry().addTrait(def.getClassName().replace(TraitFactory.SUFFIX,
+ ""),
+ def);
} else {
- pkgRegistry.getTraitRegistry().addTrait( def );
+ pkgRegistry.getTraitRegistry().addTrait(def);
}
}
}
- if ( type.isNovel() ) {
+ if (type.isNovel()) {
String fullName = typeDescr.getType().getFullName();
- JavaDialectRuntimeData dialect = (JavaDialectRuntimeData) pkgRegistry.getDialectRuntimeRegistry().getDialectData( "java" );
- switch ( type.getKind() ) {
- case TRAIT :
+ JavaDialectRuntimeData dialect = (JavaDialectRuntimeData) pkgRegistry.getDialectRuntimeRegistry().getDialectData("java");
+ switch (type.getKind()) {
+ case TRAIT:
try {
buildClass(def, fullName, dialect, configuration.getClassBuilderFactory().getTraitBuilder());
- } catch ( Exception e ) {
- this.results.add( new TypeDeclarationError( typeDescr,
- "Unable to compile declared trait " + fullName +
- ": " + e.getMessage() + ";" ) );
+ } catch (Exception e) {
+ this.results.add(new TypeDeclarationError(typeDescr,
+ "Unable to compile declared trait " + fullName +
+ ": " + e.getMessage() + ";"));
}
break;
- case ENUM :
+ case ENUM:
try {
buildClass(def, fullName, dialect, configuration.getClassBuilderFactory().getEnumClassBuilder());
- } catch ( Exception e ) {
+ } catch (Exception e) {
e.printStackTrace();
- this.results.add( new TypeDeclarationError( typeDescr,
- "Unable to compile declared enum " + fullName +
- ": " + e.getMessage() + ";" ) );
+ this.results.add(new TypeDeclarationError(typeDescr,
+ "Unable to compile declared enum " + fullName +
+ ": " + e.getMessage() + ";"));
}
break;
- case CLASS :
- default :
+ case CLASS:
+ default:
try {
buildClass(def, fullName, dialect, configuration.getClassBuilderFactory().getBeanClassBuilder());
- } catch ( Exception e ) {
- this.results.add( new TypeDeclarationError( typeDescr,
- "Unable to create a class for declared type " + fullName +
- ": " + e.getMessage() + ";" ) );
+ } catch (Exception e) {
+ this.results.add(new TypeDeclarationError(typeDescr,
+ "Unable to create a class for declared type " + fullName +
+ ": " + e.getMessage() + ";"));
}
break;
}
@@ -3132,16 +3172,16 @@ private void generateDeclaredBean(AbstractClassTypeDeclarationDescr typeDescr,
}
private void buildClass(ClassDefinition def, String fullName, JavaDialectRuntimeData dialect, ClassBuilder cb) throws Exception {
- byte[] bytecode = cb.buildClass( def );
- String resourceName = JavaDialectRuntimeData.convertClassToResourcePath( fullName );
- dialect.putClassDefinition( resourceName, bytecode );
- if ( ruleBase != null ) {
- ruleBase.registerAndLoadTypeDefinition( fullName, bytecode );
+ byte[] bytecode = cb.buildClass(def);
+ String resourceName = JavaDialectRuntimeData.convertClassToResourcePath(fullName);
+ dialect.putClassDefinition(resourceName, bytecode);
+ if (ruleBase != null) {
+ ruleBase.registerAndLoadTypeDefinition(fullName, bytecode);
} else {
if (rootClassLoader instanceof ProjectClassLoader) {
- ((ProjectClassLoader)rootClassLoader).defineClass(fullName, resourceName, bytecode);
+ ((ProjectClassLoader) rootClassLoader).defineClass(fullName, resourceName, bytecode);
} else {
- dialect.write( resourceName, bytecode );
+ dialect.write(resourceName, bytecode);
}
}
}
@@ -3157,71 +3197,71 @@ private void buildClass(ClassDefinition def, String fullName, JavaDialectRuntime
* @return
*/
private PriorityQueue<FieldDefinition> sortFields(Map<String, TypeFieldDescr> flds,
- PackageRegistry pkgRegistry) {
- PriorityQueue<FieldDefinition> queue = new PriorityQueue<FieldDefinition>( flds.size() );
+ PackageRegistry pkgRegistry) {
+ PriorityQueue<FieldDefinition> queue = new PriorityQueue<FieldDefinition>(flds.size());
int maxDeclaredPos = 0;
int curr = 0;
- BitSet occupiedPositions = new BitSet( flds.size() );
- for( TypeFieldDescr field : flds.values() ) {
+ BitSet occupiedPositions = new BitSet(flds.size());
+ for (TypeFieldDescr field : flds.values()) {
int pos = field.getIndex();
- if ( pos >= 0 ) {
- occupiedPositions.set( pos );
+ if (pos >= 0) {
+ occupiedPositions.set(pos);
}
- maxDeclaredPos = Math.max( maxDeclaredPos, pos );
+ maxDeclaredPos = Math.max(maxDeclaredPos, pos);
}
- for ( TypeFieldDescr field : flds.values() ) {
+ for (TypeFieldDescr field : flds.values()) {
try {
String typeName = field.getPattern().getObjectType();
- String fullFieldType = generatedTypes.contains( typeName ) ? typeName : pkgRegistry.getTypeResolver().resolveType( typeName ).getName();
+ String fullFieldType = generatedTypes.contains(typeName) ? typeName : pkgRegistry.getTypeResolver().resolveType(typeName).getName();
- FieldDefinition fieldDef = new FieldDefinition( field.getFieldName(),
- fullFieldType );
+ FieldDefinition fieldDef = new FieldDefinition(field.getFieldName(),
+ fullFieldType);
// field is marked as PK
- boolean isKey = field.getAnnotation( TypeDeclaration.ATTR_KEY ) != null;
- fieldDef.setKey( isKey );
-
- fieldDef.setDeclIndex( field.getIndex() );
- if ( field.getIndex() < 0 ) {
- int freePos = occupiedPositions.nextClearBit( 0 );
- if ( freePos < maxDeclaredPos ) {
- occupiedPositions.set( freePos );
+ boolean isKey = field.getAnnotation(TypeDeclaration.ATTR_KEY) != null;
+ fieldDef.setKey(isKey);
+
+ fieldDef.setDeclIndex(field.getIndex());
+ if (field.getIndex() < 0) {
+ int freePos = occupiedPositions.nextClearBit(0);
+ if (freePos < maxDeclaredPos) {
+ occupiedPositions.set(freePos);
} else {
freePos = maxDeclaredPos + 1;
}
- fieldDef.setPriority( freePos * 256 + curr++ );
+ fieldDef.setPriority(freePos * 256 + curr++);
} else {
- fieldDef.setPriority( field.getIndex() * 256 + curr++ );
+ fieldDef.setPriority(field.getIndex() * 256 + curr++);
}
- fieldDef.setInherited( field.isInherited() );
- fieldDef.setInitExpr( field.getInitExpr() );
+ fieldDef.setInherited(field.isInherited());
+ fieldDef.setInitExpr(field.getInitExpr());
- for ( String annotationName : field.getAnnotationNames() ) {
- Class annotation = resolveAnnotation( annotationName,
- pkgRegistry.getTypeResolver() );
- if ( annotation != null ) {
+ for (String annotationName : field.getAnnotationNames()) {
+ Class annotation = resolveAnnotation(annotationName,
+ pkgRegistry.getTypeResolver());
+ if (annotation != null) {
try {
- AnnotationDefinition annotationDefinition = AnnotationDefinition.build( annotation,
- field.getAnnotations().get( annotationName ).getValueMap(),
- pkgRegistry.getTypeResolver() );
- fieldDef.addAnnotation( annotationDefinition );
- } catch ( NoSuchMethodException nsme ) {
- this.results.add( new TypeDeclarationError( field,
- "Annotated field " + field.getFieldName() +
- " - undefined property in @annotation " +
- annotationName + ": " + nsme.getMessage() + ";" ) );
+ AnnotationDefinition annotationDefinition = AnnotationDefinition.build(annotation,
+ field.getAnnotations().get(annotationName).getValueMap(),
+ pkgRegistry.getTypeResolver());
+ fieldDef.addAnnotation(annotationDefinition);
+ } catch (NoSuchMethodException nsme) {
+ this.results.add(new TypeDeclarationError(field,
+ "Annotated field " + field.getFieldName() +
+ " - undefined property in @annotation " +
+ annotationName + ": " + nsme.getMessage() + ";"));
}
}
if (annotation == null || annotation == Key.class || annotation == Position.class) {
- fieldDef.addMetaData( annotationName, field.getAnnotation( annotationName ).getSingleValue() );
+ fieldDef.addMetaData(annotationName, field.getAnnotation(annotationName).getSingleValue());
}
}
- queue.add( fieldDef );
- } catch ( ClassNotFoundException cnfe ) {
- this.results.add( new TypeDeclarationError( field, cnfe.getMessage() ) );
+ queue.add(fieldDef);
+ } catch (ClassNotFoundException cnfe) {
+ this.results.add(new TypeDeclarationError(field, cnfe.getMessage()));
}
}
@@ -3230,45 +3270,45 @@ private PriorityQueue<FieldDefinition> sortFields(Map<String, TypeFieldDescr> fl
}
private void addFunction(final FunctionDescr functionDescr) {
- functionDescr.setResource( this.resource );
- PackageRegistry pkgRegistry = this.pkgRegistryMap.get( functionDescr.getNamespace() );
- Dialect dialect = pkgRegistry.getDialectCompiletimeRegistry().getDialect( functionDescr.getDialect() );
- dialect.addFunction( functionDescr,
- pkgRegistry.getTypeResolver(),
- this.resource );
+ functionDescr.setResource(this.resource);
+ PackageRegistry pkgRegistry = this.pkgRegistryMap.get(functionDescr.getNamespace());
+ Dialect dialect = pkgRegistry.getDialectCompiletimeRegistry().getDialect(functionDescr.getDialect());
+ dialect.addFunction(functionDescr,
+ pkgRegistry.getTypeResolver(),
+ this.resource);
}
private void preCompileAddFunction(final FunctionDescr functionDescr) {
- PackageRegistry pkgRegistry = this.pkgRegistryMap.get( functionDescr.getNamespace() );
- Dialect dialect = pkgRegistry.getDialectCompiletimeRegistry().getDialect( functionDescr.getDialect() );
- dialect.preCompileAddFunction( functionDescr,
- pkgRegistry.getTypeResolver() );
+ PackageRegistry pkgRegistry = this.pkgRegistryMap.get(functionDescr.getNamespace());
+ Dialect dialect = pkgRegistry.getDialectCompiletimeRegistry().getDialect(functionDescr.getDialect());
+ dialect.preCompileAddFunction(functionDescr,
+ pkgRegistry.getTypeResolver());
}
private void postCompileAddFunction(final FunctionDescr functionDescr) {
- PackageRegistry pkgRegistry = this.pkgRegistryMap.get( functionDescr.getNamespace() );
- Dialect dialect = pkgRegistry.getDialectCompiletimeRegistry().getDialect( functionDescr.getDialect() );
- dialect.postCompileAddFunction( functionDescr,
- pkgRegistry.getTypeResolver() );
+ PackageRegistry pkgRegistry = this.pkgRegistryMap.get(functionDescr.getNamespace());
+ Dialect dialect = pkgRegistry.getDialectCompiletimeRegistry().getDialect(functionDescr.getDialect());
+ dialect.postCompileAddFunction(functionDescr,
+ pkgRegistry.getTypeResolver());
}
private Map<String, RuleBuildContext> buildRuleBuilderContext(List<RuleDescr> rules) {
Map<String, RuleBuildContext> map = new HashMap<String, RuleBuildContext>();
- for ( RuleDescr ruleDescr : rules ) {
- if ( ruleDescr.getResource() == null ) {
- ruleDescr.setResource( resource );
+ for (RuleDescr ruleDescr : rules) {
+ if (ruleDescr.getResource() == null) {
+ ruleDescr.setResource(resource);
}
- PackageRegistry pkgRegistry = this.pkgRegistryMap.get( ruleDescr.getNamespace() );
+ PackageRegistry pkgRegistry = this.pkgRegistryMap.get(ruleDescr.getNamespace());
Package pkg = pkgRegistry.getPackage();
DialectCompiletimeRegistry ctr = pkgRegistry.getDialectCompiletimeRegistry();
- RuleBuildContext context = new RuleBuildContext( this,
- ruleDescr,
- ctr,
- pkg,
- ctr.getDialect( pkgRegistry.getDialect() ) );
- map.put( ruleDescr.getName(), context );
+ RuleBuildContext context = new RuleBuildContext(this,
+ ruleDescr,
+ ctr,
+ pkg,
+ ctr.getDialect(pkgRegistry.getDialect()));
+ map.put(ruleDescr.getName(), context);
}
return map;
@@ -3279,16 +3319,16 @@ private void addRule(RuleBuildContext context) {
Package pkg = context.getPkg();
- ruleBuilder.build( context );
+ ruleBuilder.build(context);
- this.results.addAll( context.getErrors() );
- this.results.addAll( context.getWarnings() );
+ this.results.addAll(context.getErrors());
+ this.results.addAll(context.getWarnings());
- context.getRule().setResource( ruleDescr.getResource() );
+ context.getRule().setResource(ruleDescr.getResource());
- context.getDialect().addRule( context );
+ context.getDialect().addRule(context);
- if ( context.needsStreamMode() ) {
+ if (context.needsStreamMode()) {
pkg.setNeedStreamMode();
}
}
@@ -3303,15 +3343,15 @@ private void addRule(RuleBuildContext context) {
*/
public Package getPackage() {
PackageRegistry pkgRegistry = null;
- if ( !this.pkgRegistryMap.isEmpty() ) {
+ if (!this.pkgRegistryMap.isEmpty()) {
pkgRegistry = (PackageRegistry) this.pkgRegistryMap.values().toArray()[currentRulePackage];
}
Package pkg = null;
- if ( pkgRegistry != null ) {
+ if (pkgRegistry != null) {
pkg = pkgRegistry.getPackage();
}
- if ( hasErrors() && pkg != null ) {
- pkg.setError( getErrors().toString() );
+ if (hasErrors() && pkg != null) {
+ pkg.setError(getErrors().toString());
}
return pkg;
}
@@ -3319,15 +3359,15 @@ public Package getPackage() {
public Package[] getPackages() {
Package[] pkgs = new Package[this.pkgRegistryMap.size()];
String errors = null;
- if ( !getErrors().isEmpty() ) {
+ if (!getErrors().isEmpty()) {
errors = getErrors().toString();
}
int i = 0;
- for ( PackageRegistry pkgRegistry : this.pkgRegistryMap.values() ) {
+ for (PackageRegistry pkgRegistry : this.pkgRegistryMap.values()) {
Package pkg = pkgRegistry.getPackage();
pkg.getDialectRuntimeRegistry().onBeforeExecute();
- if ( errors != null ) {
- pkg.setError( errors );
+ if (errors != null) {
+ pkg.setError(errors);
}
pkgs[i++] = pkg;
}
@@ -3345,7 +3385,7 @@ public PackageBuilderConfiguration getPackageBuilderConfiguration() {
}
public PackageRegistry getPackageRegistry(String name) {
- return this.pkgRegistryMap.get( name );
+ return this.pkgRegistryMap.get(name);
}
public Map<String, PackageRegistry> getPackageRegistry() {
@@ -3361,7 +3401,7 @@ public Collection<String> getPackageNames() {
}
public List<PackageDescr> getPackageDescrs(String packageName) {
- return packages.get( packageName );
+ return packages.get(packageName);
}
/**
@@ -3370,16 +3410,16 @@ public List<PackageDescr> getPackageDescrs(String packageName) {
*/
public DefaultExpander getDslExpander() {
DefaultExpander expander = new DefaultExpander();
- if ( this.dslFiles == null || this.dslFiles.isEmpty() ) {
+ if (this.dslFiles == null || this.dslFiles.isEmpty()) {
return null;
}
- for ( DSLMappingFile file : this.dslFiles ) {
- expander.addDSLMapping( file.getMapping() );
+ for (DSLMappingFile file : this.dslFiles) {
+ expander.addDSLMapping(file.getMapping());
}
return expander;
}
- public Map<String, Class< ? >> getGlobals() {
+ public Map<String, Class<?>> getGlobals() {
return this.globals;
}
@@ -3392,8 +3432,8 @@ public boolean hasErrors() {
}
public KnowledgeBuilderResults getProblems(ResultSeverity... problemTypes) {
- List<KnowledgeBuilderResult> problems = getResultList( problemTypes );
- return new PackageBuilderResults( problems.toArray( new BaseKnowledgeBuilderResultImpl[problems.size()] ) );
+ List<KnowledgeBuilderResult> problems = getResultList(problemTypes);
+ return new PackageBuilderResults(problems.toArray(new BaseKnowledgeBuilderResultImpl[problems.size()]));
}
/**
@@ -3401,28 +3441,28 @@ public KnowledgeBuilderResults getProblems(ResultSeverity... problemTypes) {
* @return
*/
private List<KnowledgeBuilderResult> getResultList(ResultSeverity... severities) {
- List<ResultSeverity> typesToFetch = Arrays.asList( severities );
+ List<ResultSeverity> typesToFetch = Arrays.asList(severities);
ArrayList<KnowledgeBuilderResult> problems = new ArrayList<KnowledgeBuilderResult>();
- for ( KnowledgeBuilderResult problem : results ) {
- if ( typesToFetch.contains( problem.getSeverity() ) ) {
- problems.add( problem );
+ for (KnowledgeBuilderResult problem : results) {
+ if (typesToFetch.contains(problem.getSeverity())) {
+ problems.add(problem);
}
}
return problems;
}
public boolean hasProblems(ResultSeverity... problemTypes) {
- return !getResultList( problemTypes ).isEmpty();
+ return !getResultList(problemTypes).isEmpty();
}
private List<DroolsError> getErrorList() {
List<DroolsError> errors = new ArrayList<DroolsError>();
- for ( KnowledgeBuilderResult problem : results ) {
- if ( problem.getSeverity() == ResultSeverity.ERROR ) {
- if ( problem instanceof ConfigurableSeverityResult ) {
- errors.add( new DroolsErrorWrapper( problem ) );
+ for (KnowledgeBuilderResult problem : results) {
+ if (problem.getSeverity() == ResultSeverity.ERROR) {
+ if (problem instanceof ConfigurableSeverityResult) {
+ errors.add(new DroolsErrorWrapper(problem));
} else {
- errors.add( (DroolsError) problem );
+ errors.add((DroolsError) problem);
}
}
}
@@ -3439,12 +3479,12 @@ public boolean hasInfo() {
public List<DroolsWarning> getWarningList() {
List<DroolsWarning> warnings = new ArrayList<DroolsWarning>();
- for ( KnowledgeBuilderResult problem : results ) {
- if ( problem.getSeverity() == ResultSeverity.WARNING ) {
- if ( problem instanceof ConfigurableSeverityResult ) {
- warnings.add( new DroolsWarningWrapper( problem ) );
+ for (KnowledgeBuilderResult problem : results) {
+ if (problem.getSeverity() == ResultSeverity.WARNING) {
+ if (problem instanceof ConfigurableSeverityResult) {
+ warnings.add(new DroolsWarningWrapper(problem));
} else {
- warnings.add( (DroolsWarning) problem );
+ warnings.add((DroolsWarning) problem);
}
}
}
@@ -3452,7 +3492,7 @@ public List<DroolsWarning> getWarningList() {
}
private List<KnowledgeBuilderResult> getInfoList() {
- return getResultList( ResultSeverity.INFO );
+ return getResultList(ResultSeverity.INFO);
}
/**
@@ -3461,7 +3501,7 @@ private List<KnowledgeBuilderResult> getInfoList() {
*/
public PackageBuilderErrors getErrors() {
List<DroolsError> errors = getErrorList();
- return new PackageBuilderErrors( errors.toArray( new DroolsError[errors.size()] ) );
+ return new PackageBuilderErrors(errors.toArray(new DroolsError[errors.size()]));
}
/**
@@ -3471,21 +3511,21 @@ public PackageBuilderErrors getErrors() {
* you will get spurious errors which will not be that helpful.
*/
protected void resetErrors() {
- resetProblemType( ResultSeverity.ERROR );
+ resetProblemType(ResultSeverity.ERROR);
}
protected void resetWarnings() {
- resetProblemType( ResultSeverity.WARNING );
+ resetProblemType(ResultSeverity.WARNING);
}
private void resetProblemType(ResultSeverity problemType) {
List<KnowledgeBuilderResult> toBeDeleted = new ArrayList<KnowledgeBuilderResult>();
- for ( KnowledgeBuilderResult problem : results ) {
- if ( problemType != null && problemType.equals( problem.getSeverity() ) ) {
- toBeDeleted.add( problem );
+ for (KnowledgeBuilderResult problem : results) {
+ if (problemType != null && problemType.equals(problem.getSeverity())) {
+ toBeDeleted.add(problem);
}
}
- this.results.removeAll( toBeDeleted );
+ this.results.removeAll(toBeDeleted);
}
@@ -3502,7 +3542,7 @@ public static class MissingPackageNameException extends IllegalArgumentException
private static final long serialVersionUID = 510l;
public MissingPackageNameException(final String message) {
- super( message );
+ super(message);
}
}
@@ -3512,7 +3552,7 @@ public static class PackageMergeException extends IllegalArgumentException {
private static final long serialVersionUID = 400L;
public PackageMergeException(final String message) {
- super( message );
+ super(message);
}
}
@@ -3542,7 +3582,7 @@ public boolean isInError() {
}
public void addError(final CompilationProblem err) {
- this.errors.add( err );
+ this.errors.add(err);
this.inError = true;
}
@@ -3559,11 +3599,11 @@ public void addError(final CompilationProblem err) {
     * DroolsError instances. It's not 1 to 1 with reported errors.
*/
protected CompilationProblem[] collectCompilerProblems() {
- if ( this.errors.isEmpty() ) {
+ if (this.errors.isEmpty()) {
return null;
} else {
final CompilationProblem[] list = new CompilationProblem[this.errors.size()];
- this.errors.toArray( list );
+ this.errors.toArray(list);
return list;
}
}
@@ -3576,18 +3616,18 @@ public static class RuleErrorHandler extends ErrorHandler {
private Rule rule;
public RuleErrorHandler(final BaseDescr ruleDescr,
- final Rule rule,
- final String message) {
+ final Rule rule,
+ final String message) {
this.descr = ruleDescr;
this.rule = rule;
this.message = message;
}
public DroolsError getError() {
- return new RuleBuildError( this.rule,
- this.descr,
- collectCompilerProblems(),
- this.message );
+ return new RuleBuildError(this.rule,
+ this.descr,
+ collectCompilerProblems(),
+ this.message);
}
}
@@ -3598,11 +3638,11 @@ public DroolsError getError() {
public static class RuleInvokerErrorHandler extends RuleErrorHandler {
public RuleInvokerErrorHandler(final BaseDescr ruleDescr,
- final Rule rule,
- final String message) {
- super( ruleDescr,
- rule,
- message );
+ final Rule rule,
+ final String message) {
+ super(ruleDescr,
+ rule,
+ message);
}
}
@@ -3611,15 +3651,15 @@ public static class FunctionErrorHandler extends ErrorHandler {
private FunctionDescr descr;
public FunctionErrorHandler(final FunctionDescr functionDescr,
- final String message) {
+ final String message) {
this.descr = functionDescr;
this.message = message;
}
public DroolsError getError() {
- return new FunctionError( this.descr,
- collectCompilerProblems(),
- this.message );
+ return new FunctionError(this.descr,
+ collectCompilerProblems(),
+ this.message);
}
}
@@ -3631,8 +3671,8 @@ public SrcErrorHandler(final String message) {
}
public DroolsError getError() {
- return new SrcError( collectCompilerProblems(),
- this.message );
+ return new SrcError(collectCompilerProblems(),
+ this.message);
}
}
@@ -3644,8 +3684,8 @@ public static class SrcError extends DroolsError {
private int[] errorLines = new int[0];
public SrcError(Object object,
- String message) {
- super( null );
+ String message) {
+ super(null);
this.object = object;
this.message = message;
}
@@ -3664,18 +3704,18 @@ public String getMessage() {
public String toString() {
final StringBuilder buf = new StringBuilder();
- buf.append( this.message );
- buf.append( " : " );
- buf.append( "\n" );
- if ( this.object instanceof CompilationProblem[] ) {
+ buf.append(this.message);
+ buf.append(" : ");
+ buf.append("\n");
+ if (this.object instanceof CompilationProblem[]) {
final CompilationProblem[] problem = (CompilationProblem[]) this.object;
- for ( CompilationProblem aProblem : problem ) {
- buf.append( "\t" );
- buf.append( aProblem );
- buf.append( "\n" );
+ for (CompilationProblem aProblem : problem) {
+ buf.append("\t");
+ buf.append(aProblem);
+ buf.append("\n");
}
- } else if ( this.object != null ) {
- buf.append( this.object );
+ } else if (this.object != null) {
+ buf.append(this.object);
}
return buf.toString();
}
@@ -3701,68 +3741,68 @@ public Collection<AbstractClassTypeDeclarationDescr> sortByHierarchy(List<Abstra
Map<QualifiedName, Collection<QualifiedName>> taxonomy = new HashMap<QualifiedName, Collection<QualifiedName>>();
Map<QualifiedName, AbstractClassTypeDeclarationDescr> cache = new HashMap<QualifiedName, AbstractClassTypeDeclarationDescr>();
- for ( AbstractClassTypeDeclarationDescr tdescr : typeDeclarations ) {
+ for (AbstractClassTypeDeclarationDescr tdescr : typeDeclarations) {
QualifiedName name = tdescr.getType();
- cache.put( name, tdescr );
+ cache.put(name, tdescr);
- if ( taxonomy.get( name ) == null ) {
- taxonomy.put( name, new ArrayList<QualifiedName>() );
+ if (taxonomy.get(name) == null) {
+ taxonomy.put(name, new ArrayList<QualifiedName>());
} else {
- this.results.add( new TypeDeclarationError( tdescr,
- "Found duplicate declaration for type " + tdescr.getTypeName() ) );
+ this.results.add(new TypeDeclarationError(tdescr,
+ "Found duplicate declaration for type " + tdescr.getTypeName()));
}
- Collection<QualifiedName> supers = taxonomy.get( name );
+ Collection<QualifiedName> supers = taxonomy.get(name);
boolean circular = false;
- for ( QualifiedName sup : tdescr.getSuperTypes() ) {
- if ( !Object.class.getName().equals( name.getFullName() ) ) {
- if ( !hasCircularDependency( tdescr.getType(), sup, taxonomy ) ) {
- supers.add( sup );
+ for (QualifiedName sup : tdescr.getSuperTypes()) {
+ if (!Object.class.getName().equals(name.getFullName())) {
+ if (!hasCircularDependency(tdescr.getType(), sup, taxonomy)) {
+ supers.add(sup);
} else {
circular = true;
- this.results.add( new TypeDeclarationError( tdescr,
- "Found circular dependency for type " + tdescr.getTypeName() ) );
+ this.results.add(new TypeDeclarationError(tdescr,
+ "Found circular dependency for type " + tdescr.getTypeName()));
break;
}
}
}
- if ( circular ) {
+ if (circular) {
tdescr.getSuperTypes().clear();
}
- for ( TypeFieldDescr field : tdescr.getFields().values() ) {
- QualifiedName typeName = new QualifiedName( field.getPattern().getObjectType() );
- if ( !hasCircularDependency( name, typeName, taxonomy ) ) {
- supers.add( typeName );
+ for (TypeFieldDescr field : tdescr.getFields().values()) {
+ QualifiedName typeName = new QualifiedName(field.getPattern().getObjectType());
+ if (!hasCircularDependency(name, typeName, taxonomy)) {
+ supers.add(typeName);
}
}
}
- List<QualifiedName> sorted = sorter.sort( taxonomy );
- ArrayList list = new ArrayList( sorted.size() );
- for ( QualifiedName name : sorted ) {
- list.add( cache.get( name ) );
+ List<QualifiedName> sorted = sorter.sort(taxonomy);
+ ArrayList list = new ArrayList(sorted.size());
+ for (QualifiedName name : sorted) {
+ list.add(cache.get(name));
}
return list;
}
private boolean hasCircularDependency(QualifiedName name,
- QualifiedName typeName,
- Map<QualifiedName, Collection<QualifiedName>> taxonomy) {
- if ( name.equals( typeName ) ) {
+ QualifiedName typeName,
+ Map<QualifiedName, Collection<QualifiedName>> taxonomy) {
+ if (name.equals(typeName)) {
return true;
}
- if ( taxonomy.containsKey( typeName ) ) {
- Collection<QualifiedName> parents = taxonomy.get( typeName );
- if ( parents.contains( name ) ) {
+ if (taxonomy.containsKey(typeName)) {
+ Collection<QualifiedName> parents = taxonomy.get(typeName);
+ if (parents.contains(name)) {
return true;
} else {
- for ( QualifiedName ancestor : parents ) {
- if ( hasCircularDependency( name, ancestor, taxonomy ) ) {
+ for (QualifiedName ancestor : parents) {
+ if (hasCircularDependency(name, ancestor, taxonomy)) {
return true;
}
}
@@ -3773,55 +3813,55 @@ private boolean hasCircularDependency(QualifiedName name,
//Entity rules inherit package attributes
private void inheritPackageAttributes(Map<String, AttributeDescr> pkgAttributes,
- RuleDescr ruleDescr) {
- if ( pkgAttributes == null ) {
+ RuleDescr ruleDescr) {
+ if (pkgAttributes == null) {
return;
}
- for ( AttributeDescr attrDescr : pkgAttributes.values() ) {
+ for (AttributeDescr attrDescr : pkgAttributes.values()) {
String name = attrDescr.getName();
- AttributeDescr ruleAttrDescr = ruleDescr.getAttributes().get( name );
- if ( ruleAttrDescr == null ) {
- ruleDescr.getAttributes().put( name,
- attrDescr );
+ AttributeDescr ruleAttrDescr = ruleDescr.getAttributes().get(name);
+ if (ruleAttrDescr == null) {
+ ruleDescr.getAttributes().put(name,
+ attrDescr);
}
}
}
private int compareTypeDeclarations(TypeDeclaration oldDeclaration,
- TypeDeclaration newDeclaration) throws IncompatibleClassChangeError {
+ TypeDeclaration newDeclaration) throws IncompatibleClassChangeError {
//different formats -> incompatible
- if ( !oldDeclaration.getFormat().equals( newDeclaration.getFormat() ) ) {
- throw new IncompatibleClassChangeError( "Type Declaration " + newDeclaration.getTypeName() + " has a different"
- + " format that its previous definition: " + newDeclaration.getFormat() + "!=" + oldDeclaration.getFormat() );
+ if (!oldDeclaration.getFormat().equals(newDeclaration.getFormat())) {
+ throw new IncompatibleClassChangeError("Type Declaration " + newDeclaration.getTypeName() + " has a different"
+                                                   + " format than its previous definition: " + newDeclaration.getFormat() + "!=" + oldDeclaration.getFormat());
}
//different superclasses -> Incompatible (TODO: check for hierarchy)
- if ( !oldDeclaration.getTypeClassDef().getSuperClass().equals( newDeclaration.getTypeClassDef().getSuperClass() ) ) {
- if ( oldDeclaration.getNature() == TypeDeclaration.Nature.DEFINITION
- && newDeclaration.getNature() == TypeDeclaration.Nature.DECLARATION
- && Object.class.getName().equals( newDeclaration.getTypeClassDef().getSuperClass() ) ) {
+ if (!oldDeclaration.getTypeClassDef().getSuperClass().equals(newDeclaration.getTypeClassDef().getSuperClass())) {
+ if (oldDeclaration.getNature() == TypeDeclaration.Nature.DEFINITION
+ && newDeclaration.getNature() == TypeDeclaration.Nature.DECLARATION
+ && Object.class.getName().equals(newDeclaration.getTypeClassDef().getSuperClass())) {
// actually do nothing. The new declaration just recalls the previous definition, probably to extend it.
} else {
- throw new IncompatibleClassChangeError( "Type Declaration " + newDeclaration.getTypeName() + " has a different"
- + " superclass that its previous definition: " + newDeclaration.getTypeClassDef().getSuperClass()
- + " != " + oldDeclaration.getTypeClassDef().getSuperClass() );
+ throw new IncompatibleClassChangeError("Type Declaration " + newDeclaration.getTypeName() + " has a different"
+                                                       + " superclass than its previous definition: " + newDeclaration.getTypeClassDef().getSuperClass()
+ + " != " + oldDeclaration.getTypeClassDef().getSuperClass());
}
}
//different duration -> Incompatible
- if ( !this.nullSafeEqualityComparison( oldDeclaration.getDurationAttribute(), newDeclaration.getDurationAttribute() ) ) {
- throw new IncompatibleClassChangeError( "Type Declaration " + newDeclaration.getTypeName() + " has a different"
- + " duration: " + newDeclaration.getDurationAttribute()
- + " != " + oldDeclaration.getDurationAttribute() );
+ if (!this.nullSafeEqualityComparison(oldDeclaration.getDurationAttribute(), newDeclaration.getDurationAttribute())) {
+ throw new IncompatibleClassChangeError("Type Declaration " + newDeclaration.getTypeName() + " has a different"
+ + " duration: " + newDeclaration.getDurationAttribute()
+ + " != " + oldDeclaration.getDurationAttribute());
}
// //different masks -> incompatible
- if ( newDeclaration.getNature().equals( TypeDeclaration.Nature.DEFINITION ) ) {
- if ( oldDeclaration.getSetMask() != newDeclaration.getSetMask() ) {
- throw new IncompatibleClassChangeError( "Type Declaration " + newDeclaration.getTypeName() + " is incompatible with"
- + " the previous definition: " + newDeclaration
- + " != " + oldDeclaration );
+ if (newDeclaration.getNature().equals(TypeDeclaration.Nature.DEFINITION)) {
+ if (oldDeclaration.getSetMask() != newDeclaration.getSetMask()) {
+ throw new IncompatibleClassChangeError("Type Declaration " + newDeclaration.getTypeName() + " is incompatible with"
+ + " the previous definition: " + newDeclaration
+ + " != " + oldDeclaration);
}
}
@@ -3830,24 +3870,24 @@ private int compareTypeDeclarations(TypeDeclaration oldDeclaration,
//Field comparison
List<FactField> oldFields = oldDeclaration.getTypeClassDef().getFields();
Map<String, FactField> newFieldsMap = new HashMap<String, FactField>();
- for ( FactField factField : newDeclaration.getTypeClassDef().getFields() ) {
- newFieldsMap.put( factField.getName(), factField );
+ for (FactField factField : newDeclaration.getTypeClassDef().getFields()) {
+ newFieldsMap.put(factField.getName(), factField);
}
//each of the fields in the old definition that are also present in the
//new definition must have the same type. If not -> Incompatible
boolean allFieldsInOldDeclarationAreStillPresent = true;
- for ( FactField oldFactField : oldFields ) {
- FactField newFactField = newFieldsMap.get( oldFactField.getName() );
+ for (FactField oldFactField : oldFields) {
+ FactField newFactField = newFieldsMap.get(oldFactField.getName());
- if ( newFactField != null ) {
+ if (newFactField != null) {
//we can't use newFactField.getType() since it throws a NPE at this point.
String newFactType = ((FieldDefinition) newFactField).getTypeName();
- if ( !newFactType.equals( oldFactField.getType().getCanonicalName() ) ) {
- throw new IncompatibleClassChangeError( "Type Declaration " + newDeclaration.getTypeName() + "." + newFactField.getName() + " has a different"
- + " type that its previous definition: " + newFactType
- + " != " + oldFactField.getType().getCanonicalName() );
+ if (!newFactType.equals(oldFactField.getType().getCanonicalName())) {
+ throw new IncompatibleClassChangeError("Type Declaration " + newDeclaration.getTypeName() + "." + newFactField.getName() + " has a different"
+                                                           + " type than its previous definition: " + newFactType
+ + " != " + oldFactField.getType().getCanonicalName());
}
} else {
allFieldsInOldDeclarationAreStillPresent = false;
@@ -3856,12 +3896,12 @@ private int compareTypeDeclarations(TypeDeclaration oldDeclaration,
}
//If the old declaration has less fields than the new declaration, oldDefinition < newDefinition
- if ( oldFields.size() < newFieldsMap.size() ) {
+ if (oldFields.size() < newFieldsMap.size()) {
return -1;
}
//If the old declaration has more fields than the new declaration, oldDefinition > newDefinition
- if ( oldFields.size() > newFieldsMap.size() ) {
+ if (oldFields.size() > newFieldsMap.size()) {
return 1;
}
@@ -3869,14 +3909,14 @@ private int compareTypeDeclarations(TypeDeclaration oldDeclaration,
        //and all the fields present in the old declaration are also present in
//the new declaration, then they are considered "equal", otherwise
//they are incompatible
- if ( allFieldsInOldDeclarationAreStillPresent ) {
+ if (allFieldsInOldDeclarationAreStillPresent) {
return 0;
}
//Both declarations have the same number of fields, but not all the
//fields in the old declaration are present in the new declaration.
- throw new IncompatibleClassChangeError( newDeclaration.getTypeName() + " introduces"
- + " fields that are not present in its previous version." );
+ throw new IncompatibleClassChangeError(newDeclaration.getTypeName() + " introduces"
+ + " fields that are not present in its previous version.");
}
@@ -3886,37 +3926,38 @@ private int compareTypeDeclarations(TypeDeclaration oldDeclaration,
* @param newDeclaration
*/
private void mergeTypeDeclarations(TypeDeclaration oldDeclaration,
- TypeDeclaration newDeclaration) {
- if ( oldDeclaration == null ) {
+ TypeDeclaration newDeclaration) {
+ if (oldDeclaration == null) {
return;
}
//add the missing fields (if any) to newDeclaration
- for ( FieldDefinition oldFactField : oldDeclaration.getTypeClassDef().getFieldsDefinitions() ) {
- FieldDefinition newFactField = newDeclaration.getTypeClassDef().getField( oldFactField.getName() );
- if ( newFactField == null ) {
- newDeclaration.getTypeClassDef().addField( oldFactField );
+ for (FieldDefinition oldFactField : oldDeclaration.getTypeClassDef().getFieldsDefinitions()) {
+ FieldDefinition newFactField = newDeclaration.getTypeClassDef().getField(oldFactField.getName());
+ if (newFactField == null) {
+ newDeclaration.getTypeClassDef().addField(oldFactField);
}
}
//copy the defined class
- newDeclaration.setTypeClass( oldDeclaration.getTypeClass() );
+ newDeclaration.setTypeClass(oldDeclaration.getTypeClass());
}
private boolean nullSafeEqualityComparison(Comparable c1,
- Comparable c2) {
- if ( c1 == null ) {
+ Comparable c2) {
+ if (c1 == null) {
return c2 == null;
}
- return c2 != null && c1.compareTo( c2 ) == 0;
+ return c2 != null && c1.compareTo(c2) == 0;
}
static class TypeDefinition {
+
private final AbstractClassTypeDeclarationDescr typeDescr;
private final TypeDeclaration type;
private TypeDefinition(TypeDeclaration type,
- AbstractClassTypeDeclarationDescr typeDescr) {
+ AbstractClassTypeDeclarationDescr typeDescr) {
this.type = type;
this.typeDescr = typeDescr;
}
@@ -3930,20 +3971,20 @@ public String getNamespace() {
}
}
- private ChangeSet parseChangeSet( Resource resource ) throws IOException, SAXException {
- XmlChangeSetReader reader = new XmlChangeSetReader( this.configuration.getSemanticModules() );
+ private ChangeSet parseChangeSet(Resource resource) throws IOException, SAXException {
+ XmlChangeSetReader reader = new XmlChangeSetReader(this.configuration.getSemanticModules());
if (resource instanceof ClassPathResource) {
- reader.setClassLoader( ( (ClassPathResource) resource ).getClassLoader(),
- ( (ClassPathResource) resource ).getClazz() );
+ reader.setClassLoader(((ClassPathResource) resource).getClassLoader(),
+ ((ClassPathResource) resource).getClazz());
} else {
- reader.setClassLoader( this.configuration.getClassLoader(),
- null );
+ reader.setClassLoader(this.configuration.getClassLoader(),
+ null);
}
Reader resourceReader = null;
try {
resourceReader = resource.getReader();
- ChangeSet changeSet = reader.read( resourceReader );
+ ChangeSet changeSet = reader.read(resourceReader);
return changeSet;
} finally {
if (resourceReader != null) {
@@ -3952,103 +3993,106 @@ private ChangeSet parseChangeSet( Resource resource ) throws IOException, SAXExc
}
}
- public void registerBuildResource( final Resource resource, ResourceType type ) {
+ public void registerBuildResource(final Resource resource, ResourceType type) {
InternalResource ires = (InternalResource) resource;
- if ( ires.getResourceType() == null ) {
- ires.setResourceType( type );
- } else if ( ires.getResourceType() != type ) {
- this.results.add( new ResourceTypeDeclarationWarning( resource, ires.getResourceType(), type ) );
+ if (ires.getResourceType() == null) {
+ ires.setResourceType(type);
+ } else if (ires.getResourceType() != type) {
+ this.results.add(new ResourceTypeDeclarationWarning(resource, ires.getResourceType(), type));
}
- if ( ResourceType.CHANGE_SET == type ) {
+ if (ResourceType.CHANGE_SET == type) {
try {
- ChangeSet changeSet = parseChangeSet( resource );
- List<Resource> resources = new ArrayList<Resource>( );
- resources.add( resource );
- for ( Resource addedRes : changeSet.getResourcesAdded() ) {
- resources.add( addedRes );
+ ChangeSet changeSet = parseChangeSet(resource);
+ List<Resource> resources = new ArrayList<Resource>();
+ resources.add(resource);
+ for (Resource addedRes : changeSet.getResourcesAdded()) {
+ resources.add(addedRes);
}
- for ( Resource modifiedRes : changeSet.getResourcesModified() ) {
- resources.add( modifiedRes );
+ for (Resource modifiedRes : changeSet.getResourcesModified()) {
+ resources.add(modifiedRes);
}
- for ( Resource removedRes : changeSet.getResourcesRemoved() ) {
- resources.add( removedRes );
+ for (Resource removedRes : changeSet.getResourcesRemoved()) {
+ resources.add(removedRes);
}
- buildResources.push( resources );
- } catch ( Exception e ) {
- results.add( new DroolsError() {
+ buildResources.push(resources);
+ } catch (Exception e) {
+ results.add(new DroolsError() {
+
public String getMessage() {
return "Unable to register changeset resource " + resource;
}
- public int[] getLines() { return new int[ 0 ]; }
- } );
+
+ public int[] getLines() {
+ return new int[0];
+ }
+ });
}
} else {
- buildResources.push( Arrays.asList( resource ) );
+ buildResources.push(Arrays.asList(resource));
}
}
-
public void registerBuildResources(List<Resource> resources) {
- buildResources.push( resources );
+ buildResources.push(resources);
}
public void undo() {
- if ( buildResources.isEmpty() ) {
+ if (buildResources.isEmpty()) {
return;
}
- for ( Resource resource : buildResources.pop() ) {
- removeObjectsGeneratedFromResource( resource );
+ for (Resource resource : buildResources.pop()) {
+ removeObjectsGeneratedFromResource(resource);
}
}
public boolean removeObjectsGeneratedFromResource(Resource resource) {
boolean modified = false;
- if ( pkgRegistryMap != null ) {
- for ( PackageRegistry packageRegistry : pkgRegistryMap.values() ) {
- modified = packageRegistry.removeObjectsGeneratedFromResource( resource ) || modified;
+ if (pkgRegistryMap != null) {
+ for (PackageRegistry packageRegistry : pkgRegistryMap.values()) {
+ modified = packageRegistry.removeObjectsGeneratedFromResource(resource) || modified;
}
}
- if ( results != null ) {
+ if (results != null) {
Iterator<KnowledgeBuilderResult> i = results.iterator();
- while ( i.hasNext() ) {
- if ( resource.equals( i.next().getResource() ) ) {
+ while (i.hasNext()) {
+ if (resource.equals(i.next().getResource())) {
i.remove();
}
}
}
- if ( processBuilder != null && processBuilder.getErrors() != null ) {
+ if (processBuilder != null && processBuilder.getErrors() != null) {
Iterator<? extends KnowledgeBuilderResult> i = processBuilder.getErrors().iterator();
- while ( i.hasNext() ) {
- if ( resource.equals( i.next().getResource() ) ) {
+ while (i.hasNext()) {
+ if (resource.equals(i.next().getResource())) {
i.remove();
}
}
}
- if ( results.size() == 0 ) {
+ if (results.size() == 0) {
// TODO Error attribution might be bugged
- for ( PackageRegistry packageRegistry : pkgRegistryMap.values() ) {
+ for (PackageRegistry packageRegistry : pkgRegistryMap.values()) {
packageRegistry.getPackage().resetErrors();
}
}
- if ( cacheTypes != null ) {
+ if (cacheTypes != null) {
List<String> typesToBeRemoved = new ArrayList<String>();
- for ( Map.Entry<String, TypeDeclaration> type : cacheTypes.entrySet() ) {
- if ( resource.equals( type.getValue().getResource() ) ) {
- typesToBeRemoved.add( type.getKey() );
+ for (Map.Entry<String, TypeDeclaration> type : cacheTypes.entrySet()) {
+ if (resource.equals(type.getValue().getResource())) {
+ typesToBeRemoved.add(type.getKey());
}
}
- for ( String type : typesToBeRemoved ) {
- cacheTypes.remove( type );
+ for (String type : typesToBeRemoved) {
+ cacheTypes.remove(type);
}
}
- for ( List<PackageDescr> pkgDescrs : packages.values() ) {
- for ( PackageDescr pkgDescr : pkgDescrs ) {
- pkgDescr.removeObjectsGeneratedFromResource( resource );
+ for (List<PackageDescr> pkgDescrs : packages.values()) {
+ for (PackageDescr pkgDescr : pkgDescrs) {
+ pkgDescr.removeObjectsGeneratedFromResource(resource);
}
}
@@ -4058,4 +4102,20 @@ public boolean removeObjectsGeneratedFromResource(Resource resource) {
return modified;
}
+
+ public static interface AssetFilter {
+ public static enum Action {
+ DO_NOTHING, ADD, REMOVE, UPDATE;
+ }
+
+ public Action accept(String pkgName, String assetName);
+ }
+
+ public AssetFilter getAssetFilter() {
+ return assetFilter;
+ }
+
+ public void setAssetFilter(AssetFilter assetFilter) {
+ this.assetFilter = assetFilter;
+ }
}
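
Editorial note (not part of the commit): a minimal sketch of how the AssetFilter hook added above to PackageBuilder might be implemented by a caller driving an incremental build. The DiffingAssetFilter class and the changedAssets map are hypothetical names used only for illustration; only PackageBuilder.AssetFilter, its nested Action enum, and setAssetFilter(...) come from the diff above.

import java.util.Map;
import org.drools.compiler.compiler.PackageBuilder;

class DiffingAssetFilter implements PackageBuilder.AssetFilter {
    // maps "pkgName:assetName" to the action computed from a change set (hypothetical layout)
    private final Map<String, Action> changedAssets;

    DiffingAssetFilter(Map<String, Action> changedAssets) {
        this.changedAssets = changedAssets;
    }

    public Action accept(String pkgName, String assetName) {
        Action action = changedAssets.get(pkgName + ":" + assetName);
        // assets that are not mentioned in the change set are left untouched
        return action == null ? Action.DO_NOTHING : action;
    }
}

// usage, assuming a PackageBuilder instance named pkgbuilder:
//   pkgbuilder.setAssetFilter(new DiffingAssetFilter(changedAssets));
//   ... run the incremental build ...
//   pkgbuilder.setAssetFilter(null);
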
diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/AbstractKieModule.java b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/AbstractKieModule.java
index 984d6f7f532..fa8c4a94850 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/AbstractKieModule.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/AbstractKieModule.java
@@ -9,6 +9,7 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
@@ -39,6 +40,8 @@
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderError;
import org.kie.internal.builder.KnowledgeBuilderFactory;
+import org.kie.internal.builder.ResourceChange;
+import org.kie.internal.builder.ResourceChangeSet;
import org.kie.internal.definition.KnowledgePackage;
import org.kie.internal.io.ResourceFactory;
import org.kie.internal.io.ResourceTypeImpl;
@@ -351,4 +354,27 @@ private void validatePomModel(PomModel pomModel) {
private byte[] getPomXml() {
return getBytes(((ReleaseIdImpl)releaseId).getPomXmlPath());
}
+
+ public static boolean updateResource(CompositeKnowledgeBuilder ckbuilder,
+ InternalKieModule kieModule,
+ String resourceName,
+ ResourceChangeSet changes) {
+ ResourceConfiguration conf = getResourceConfiguration(kieModule, resourceName);
+ Resource resource = kieModule.getResource(resourceName);
+ if (resource != null) {
+ if (conf == null) {
+ ckbuilder.add(resource,
+ ResourceType.determineResourceType(resourceName),
+ changes );
+ } else {
+ ckbuilder.add(resource,
+ ResourceType.determineResourceType(resourceName),
+ conf,
+ changes );
+ }
+ return true;
+ }
+ return false;
+ }
+
}
diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java
index 48243217ef1..0d1d267b562 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieContainerImpl.java
@@ -14,9 +14,7 @@
import org.drools.compiler.builder.impl.KnowledgeBuilderImpl;
import org.drools.compiler.compiler.PackageBuilder;
import org.drools.compiler.kie.util.ChangeSetBuilder;
-import org.drools.compiler.kie.util.ChangeType;
import org.drools.compiler.kie.util.KieJarChangeSet;
-import org.drools.compiler.kie.util.ResourceChangeSet;
import org.drools.compiler.kproject.models.KieBaseModelImpl;
import org.drools.compiler.kproject.models.KieSessionModelImpl;
import org.drools.core.definitions.impl.KnowledgePackageImp;
@@ -34,6 +32,8 @@
import org.kie.api.builder.model.KieSessionModel;
import org.kie.api.conf.EventProcessingOption;
import org.kie.api.event.KieRuntimeEventManager;
+import org.kie.api.io.Resource;
+import org.kie.api.io.ResourceType;
import org.kie.api.logger.KieLoggers;
import org.kie.api.runtime.Environment;
import org.kie.api.runtime.KieSession;
@@ -41,9 +41,12 @@
import org.kie.api.runtime.StatelessKieSession;
import org.kie.internal.KnowledgeBase;
import org.kie.internal.KnowledgeBaseFactory;
+import org.kie.internal.builder.ChangeType;
import org.kie.internal.builder.CompositeKnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderFactory;
+import org.kie.internal.builder.ResourceChange;
+import org.kie.internal.builder.ResourceChangeSet;
import org.kie.internal.definition.KnowledgePackage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -121,9 +124,23 @@ public void updateToVersion(ReleaseId newReleaseId) {
if( ! rcs.getChangeType().equals( ChangeType.REMOVED ) ) {
String resourceName = rcs.getResourceName();
if( KieBuilderImpl.filterFileInKBase( kieBaseModel, resourceName ) && ! resourceName.endsWith( ".properties" ) ) {
- fileCount += AbstractKieModule.addFile( ckbuilder,
- newKM,
- resourceName ) ? 1 : 0;
+ Resource resource = currentKM.getResource( rcs.getResourceName() );
+ List<ResourceChange> changes = rcs.getChanges();
+ if( ! changes.isEmpty() ) {
+ // we need to deal with individual parts of the resource
+ fileCount += AbstractKieModule.updateResource( ckbuilder,
+ newKM,
+ resourceName,
+ rcs ) ? 1 : 0;
+ } else {
+ // the whole resource has to handled
+ if( rcs.getChangeType().equals( ChangeType.UPDATED ) ) {
+ pkgbuilder.removeObjectsGeneratedFromResource( resource );
+ }
+ fileCount += AbstractKieModule.addFile( ckbuilder,
+ newKM,
+ resourceName ) ? 1 : 0;
+ }
}
}
}
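// ---------------------------------------------------------------------------
// Editorial sketch (not part of the patch above): the update decision taken in
// KieContainerImpl.updateToVersion() — when the change set lists individual
// parts (rules, declarations), only those parts are rebuilt; otherwise the
// objects generated from the resource are removed and the whole file is
// re-added. All names below are invented stand-ins, not the Drools API.
import java.util.List;

public class ResourceUpdateSketch {

    enum ChangeType { ADDED, UPDATED, REMOVED }

    static void applyChange(ChangeType type, List<String> partChanges, String resourceName) {
        if (!partChanges.isEmpty()) {
            // fine-grained update: only the listed rules/declarations are recompiled
            System.out.println("updating parts " + partChanges + " of " + resourceName);
        } else {
            if (type == ChangeType.UPDATED) {
                // whole-resource update: drop everything the old version produced first
                System.out.println("removing objects generated from " + resourceName);
            }
            System.out.println("re-adding whole resource " + resourceName);
        }
    }

    public static void main(String[] args) {
        applyChange(ChangeType.UPDATED, List.of("rule R2"), "rules.drl");
        applyChange(ChangeType.UPDATED, List.of(), "model.drl");
    }
}
// (end of editorial sketch)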
diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/util/ChangeSetBuilder.java b/drools-compiler/src/main/java/org/drools/compiler/kie/util/ChangeSetBuilder.java
index ca3b49bdbc8..b154afb38cb 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/kie/util/ChangeSetBuilder.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/kie/util/ChangeSetBuilder.java
@@ -24,11 +24,14 @@
import java.util.List;
import org.drools.compiler.compiler.DrlParser;
-import org.drools.core.io.impl.ByteArrayResource;
+import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.drools.compiler.lang.descr.PackageDescr;
import org.drools.compiler.lang.descr.RuleDescr;
-import org.drools.compiler.kie.builder.impl.InternalKieModule;
+import org.drools.core.io.impl.ByteArrayResource;
import org.kie.api.io.ResourceType;
+import org.kie.internal.builder.ChangeType;
+import org.kie.internal.builder.ResourceChange;
+import org.kie.internal.builder.ResourceChangeSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -88,10 +91,14 @@ public ResourceChangeSet diffResource(String file,
boolean found = false;
for( Iterator<RuleDescr> it = orules.iterator(); it.hasNext(); ) {
RuleDescr ord = it.next();
- if( ord.getName().equals( crd ) ) {
+ if( ord.getName().equals( crd.getName() ) ) {
found = true;
it.remove();
- if( !ord.equals( crd ) ) {
+
+ // using byte[] comparison because using the descriptor equals() method
+ // is brittle and heavier than iterating an array
+ if( !segmentEquals(ob, ord.getStartCharacter(), ord.getEndCharacter(),
+ cb, crd.getStartCharacter(), crd.getEndCharacter() ) ) {
pkgcs.getChanges().add( new ResourceChange( ChangeType.UPDATED,
ResourceChange.Type.RULE,
crd.getName() ) );
@@ -124,6 +131,21 @@ public int compare(ResourceChange o1,
return pkgcs;
}
+ private boolean segmentEquals( byte[] a1, int s1, int e1,
+ byte[] a2, int s2, int e2) {
+ int length = e1 - s1;
+ if( length <= 0 || length != e2-s2 || s1+length > a1.length || s2+length > a2.length ) {
+ return false;
+ }
+ for( int i = 0; i < length; i++ ) {
+ if( a1[s1+i] != a2[s2+i] ) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+
public String toProperties( KieJarChangeSet kcs ) {
StringBuilder builder = new StringBuilder();
builder.append( "kiejar.changeset.version=1.0\n" );
diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/util/ChangeType.java b/drools-compiler/src/main/java/org/drools/compiler/kie/util/ChangeType.java
deleted file mode 100644
index 0a2c4a9dcc3..00000000000
--- a/drools-compiler/src/main/java/org/drools/compiler/kie/util/ChangeType.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package org.drools.compiler.kie.util;
-
-public enum ChangeType {
- REMOVED, UPDATED, ADDED;
-
- public String toString() {
- return super.toString().toLowerCase();
- }
-}
diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/util/KieJarChangeSet.java b/drools-compiler/src/main/java/org/drools/compiler/kie/util/KieJarChangeSet.java
index bd812a91214..9420297b7a8 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/kie/util/KieJarChangeSet.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/kie/util/KieJarChangeSet.java
@@ -3,6 +3,8 @@
import java.util.HashMap;
import java.util.Map;
+import org.kie.internal.builder.ResourceChangeSet;
+
public class KieJarChangeSet {
private final Map<String, ResourceChangeSet> changes = new HashMap<String, ResourceChangeSet>();
diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/util/ResourceChange.java b/drools-compiler/src/main/java/org/drools/compiler/kie/util/ResourceChange.java
deleted file mode 100644
index 8e5e2318f24..00000000000
--- a/drools-compiler/src/main/java/org/drools/compiler/kie/util/ResourceChange.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package org.drools.compiler.kie.util;
-
-
-public class ResourceChange {
- public static enum Type {
- RULE, DECLARATION, FUNCTION;
- public String toString() {
- return super.toString().toLowerCase();
- }
- }
- private final ChangeType action;
- private final ResourceChange.Type type;
- private final String name;
- public ResourceChange(ChangeType action,
- ResourceChange.Type type,
- String name) {
- super();
- this.action = action;
- this.type = type;
- this.name = name;
- }
- public ChangeType getChangeType() {
- return action;
- }
- public ResourceChange.Type getType() {
- return type;
- }
- public String getName() {
- return name;
- }
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((action == null) ? 0 : action.hashCode());
- result = prime * result + ((name == null) ? 0 : name.hashCode());
- result = prime * result + ((type == null) ? 0 : type.hashCode());
- return result;
- }
- @Override
- public boolean equals(Object obj) {
- if ( this == obj ) return true;
- if ( obj == null ) return false;
- if ( getClass() != obj.getClass() ) return false;
- ResourceChange other = (ResourceChange) obj;
- if ( action != other.action ) return false;
- if ( name == null ) {
- if ( other.name != null ) return false;
- } else if ( !name.equals( other.name ) ) return false;
- if ( type != other.type ) return false;
- return true;
- }
-
- @Override
- public String toString() {
- return "ResourceChange [action=" + action + ", type=" + type + ", name=" + name + "]";
- }
-
-}
diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/util/ResourceChangeSet.java b/drools-compiler/src/main/java/org/drools/compiler/kie/util/ResourceChangeSet.java
deleted file mode 100644
index de7dca84cd5..00000000000
--- a/drools-compiler/src/main/java/org/drools/compiler/kie/util/ResourceChangeSet.java
+++ /dev/null
@@ -1,53 +0,0 @@
-package org.drools.compiler.kie.util;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class ResourceChangeSet {
- private final String resourceName; // src/main/resources/org/drools/rules.drl
- private final ChangeType status;
- private final List<ResourceChange> changes = new ArrayList<ResourceChange>();
-
- public ResourceChangeSet(String resourceName, ChangeType status) {
- this.resourceName = resourceName;
- this.status = status;
- }
-
- public String getResourceName() {
- return resourceName;
- }
-
- public ChangeType getChangeType() {
- return status;
- }
-
- public List<ResourceChange> getChanges() {
- return changes;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((changes == null) ? 0 : changes.hashCode());
- result = prime * result + ((resourceName == null) ? 0 : resourceName.hashCode());
- result = prime * result + ((status == null) ? 0 : status.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if ( this == obj ) return true;
- if ( obj == null ) return false;
- if ( getClass() != obj.getClass() ) return false;
- ResourceChangeSet other = (ResourceChangeSet) obj;
- if ( changes == null ) {
- if ( other.changes != null ) return false;
- } else if ( !changes.equals( other.changes ) ) return false;
- if ( resourceName == null ) {
- if ( other.resourceName != null ) return false;
- } else if ( !resourceName.equals( other.resourceName ) ) return false;
- if ( status != other.status ) return false;
- return true;
- }
-}
diff --git a/drools-compiler/src/main/java/org/drools/compiler/lang/descr/AndDescr.java b/drools-compiler/src/main/java/org/drools/compiler/lang/descr/AndDescr.java
index 6933b325d1f..a753cea6119 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/lang/descr/AndDescr.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/lang/descr/AndDescr.java
@@ -77,8 +77,6 @@ public boolean removeDescr(BaseDescr baseDescr) {
return baseDescr == null ? false : descrs.remove(baseDescr);
}
-
-
public String toString() {
return "[AND "+descrs+" ]";
}
diff --git a/drools-compiler/src/main/java/org/drools/compiler/lang/descr/CompositePackageDescr.java b/drools-compiler/src/main/java/org/drools/compiler/lang/descr/CompositePackageDescr.java
index b2dea3996a0..e83f9dfb0ba 100644
--- a/drools-compiler/src/main/java/org/drools/compiler/lang/descr/CompositePackageDescr.java
+++ b/drools-compiler/src/main/java/org/drools/compiler/lang/descr/CompositePackageDescr.java
@@ -1,11 +1,15 @@
package org.drools.compiler.lang.descr;
+import org.drools.compiler.compiler.PackageBuilder;
import org.kie.api.io.Resource;
+import java.util.ArrayList;
import java.util.List;
import java.util.Set;
public class CompositePackageDescr extends PackageDescr {
+
+ private CompositeAssetFilter filter;
public CompositePackageDescr() { }
@@ -94,4 +98,32 @@ private void internalAdd(Resource resource, PackageDescr packageDescr) {
}
}
}
+
+ public CompositeAssetFilter getFilter() {
+ return filter;
+ }
+
+ public void addFilter( PackageBuilder.AssetFilter f ) {
+ if( f != null ) {
+ if( filter == null ) {
+ this.filter = new CompositeAssetFilter();
+ }
+ this.filter.filters.add( f );
+ }
+ }
+
+ public static class CompositeAssetFilter implements PackageBuilder.AssetFilter {
+ public List<PackageBuilder.AssetFilter> filters = new ArrayList<PackageBuilder.AssetFilter>();
+
+ @Override
+ public Action accept(String pkgName, String assetName) {
+ for( PackageBuilder.AssetFilter filter : filters ) {
+ Action result = filter.accept(pkgName, assetName);
+ if( !Action.DO_NOTHING.equals( result ) ) {
+ return result;
+ }
+ }
+ return Action.DO_NOTHING;
+ }
+ }
}
diff --git a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/IncrementalCompilationTest.java b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/IncrementalCompilationTest.java
index 37fd9fde563..3c3c26d7344 100644
--- a/drools-compiler/src/test/java/org/drools/compiler/integrationtests/IncrementalCompilationTest.java
+++ b/drools-compiler/src/test/java/org/drools/compiler/integrationtests/IncrementalCompilationTest.java
@@ -1,14 +1,24 @@
package org.drools.compiler.integrationtests;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
import org.drools.compiler.CommonTestMethodBase;
import org.drools.compiler.Message;
-import org.junit.Ignore;
+import org.drools.compiler.kie.builder.impl.KieContainerImpl;
+import org.drools.core.RuleBase;
+import org.drools.core.common.InternalRuleBase;
+import org.drools.core.impl.KnowledgeBaseImpl;
+import org.drools.core.reteoo.RuleTerminalNode;
+import org.kie.api.definition.rule.Rule;
import org.junit.Test;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.KieModule;
import org.kie.api.builder.ReleaseId;
+import org.kie.api.definition.KiePackage;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.internal.builder.IncrementalResults;
@@ -468,4 +478,80 @@ public void testIncrementalCompilationAddErrorThenEmptyWithoutError() throws Exc
assertEquals( 0, addResults2.getAddedMessages().size() );
assertEquals( 0, addResults2.getRemovedMessages().size() );
}
+
+ @Test
+ public void testRuleRemoval() throws Exception {
+ String drl1 = "package org.drools.compiler\n" +
+ "rule R1 when\n" +
+ " $m : Message()\n" +
+ "then\n" +
+ "end\n";
+
+ String drl2 = "rule R2 when\n" +
+ " $m : Message( message == \"Hi Universe\" )\n" +
+ "then\n" +
+ "end\n";
+
+ String drl3 = "rule R3 when\n" +
+ " $m : Message( message == \"Hello World\" )\n" +
+ "then\n" +
+ "end\n";
+
+ KieServices ks = KieServices.Factory.get();
+
+ // Create an in-memory jar for version 1.0.0
+ ReleaseId releaseId1 = ks.newReleaseId("org.kie", "test-upgrade", "1.0.0");
+ KieModule km = createAndDeployJar(ks, releaseId1, drl1 + drl2 +drl3 );
+
+ // Create a session and fire rules
+ KieContainer kc = ks.newKieContainer(km.getReleaseId());
+ KiePackage kpkg = ((KieContainerImpl) kc).getKieBase().getKiePackage( "org.drools.compiler");
+ assertEquals( 3, kpkg.getRules().size() );
+ Map<String, Rule> rules = rulestoMap( kpkg.getRules() );
+
+
+ assertNotNull(((org.drools.core.definitions.rule.impl.RuleImpl) rules.get("R1")));
+ assertNotNull(((org.drools.core.definitions.rule.impl.RuleImpl) rules.get("R2")));
+ assertNotNull(((org.drools.core.definitions.rule.impl.RuleImpl) rules.get("R3")));
+
+ RuleBase rb_1 = ((InternalRuleBase) ((KnowledgeBaseImpl) kc.getKieBase()).getRuleBase());
+
+ RuleTerminalNode rtn1_1 = (RuleTerminalNode) ((InternalRuleBase) ((KnowledgeBaseImpl)kc.getKieBase()).getRuleBase()).getReteooBuilder().getTerminalNodes( "R1" )[0];
+ RuleTerminalNode rtn2_1 = (RuleTerminalNode) ((InternalRuleBase) ((KnowledgeBaseImpl)kc.getKieBase()).getRuleBase()).getReteooBuilder().getTerminalNodes( "R2" )[0];
+ RuleTerminalNode rtn3_1 = (RuleTerminalNode) ((InternalRuleBase) ((KnowledgeBaseImpl)kc.getKieBase()).getRuleBase()).getReteooBuilder().getTerminalNodes( "R3" )[0];
+
+ // Create a new jar for version 1.1.0
+ ReleaseId releaseId2 = ks.newReleaseId("org.kie", "test-upgrade", "1.1.0");
+ km = createAndDeployJar( ks, releaseId2, drl1 + drl3 );
+
+ // try to update the container to version 1.1.0
+ kc.updateToVersion(releaseId2);
+
+ InternalRuleBase rb_2 = ((InternalRuleBase) ((KnowledgeBaseImpl) kc.getKieBase()).getRuleBase());
+ assertSame ( rb_1, rb_2 );
+
+ RuleTerminalNode rtn1_2 = (RuleTerminalNode) rb_2.getReteooBuilder().getTerminalNodes( "R1" )[0];
+ RuleTerminalNode rtn3_2 = (RuleTerminalNode) rb_2.getReteooBuilder().getTerminalNodes( "R3" )[0];
+ assertNull( rb_2.getReteooBuilder().getTerminalNodes( "R2" ) );
+
+ assertSame( rtn3_1, rtn3_2 );
+ assertSame( rtn1_1, rtn1_2 );
+
+ kpkg = ((KieContainerImpl) kc).getKieBase().getKiePackage( "org.drools.compiler");
+ assertEquals( 2, kpkg.getRules().size() );
+ rules = rulestoMap( kpkg.getRules() );
+
+ assertNotNull( ((org.drools.core.definitions.rule.impl.RuleImpl ) rules.get( "R1" )) );
+ assertNull(((org.drools.core.definitions.rule.impl.RuleImpl) rules.get("R2")));
+ assertNotNull(((org.drools.core.definitions.rule.impl.RuleImpl) rules.get("R3")));
+ }
+
+ private Map<String, Rule> rulestoMap(Collection<Rule> rules) {
+ Map<String, Rule> ret = new HashMap<String, Rule>();
+ for( Rule rule : rules ) {
+ ret.put( rule.getName(), rule );
+ }
+ return ret;
+ }
+
}
diff --git a/drools-compiler/src/test/java/org/drools/compiler/kie/util/ChangeSetBuilderTest.java b/drools-compiler/src/test/java/org/drools/compiler/kie/util/ChangeSetBuilderTest.java
index 30e239ea8f1..b03aa0d0a30 100644
--- a/drools-compiler/src/test/java/org/drools/compiler/kie/util/ChangeSetBuilderTest.java
+++ b/drools-compiler/src/test/java/org/drools/compiler/kie/util/ChangeSetBuilderTest.java
@@ -1,5 +1,17 @@
package org.drools.compiler.kie.util;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.drools.compiler.kproject.models.KieModuleModelImpl;
import org.junit.Test;
import org.kie.api.KieServices;
@@ -8,21 +20,13 @@
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.builder.model.KieSessionModel;
import org.kie.api.builder.model.KieSessionModel.KieSessionType;
-import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.kie.api.conf.EqualityBehaviorOption;
import org.kie.api.conf.EventProcessingOption;
import org.kie.api.runtime.conf.ClockTypeOption;
-import org.drools.compiler.kie.util.ResourceChange.Type;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.not;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.junit.Assert.assertThat;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
+import org.kie.internal.builder.ChangeType;
+import org.kie.internal.builder.ResourceChange;
+import org.kie.internal.builder.ResourceChangeSet;
+import org.kie.internal.builder.ResourceChange.Type;
public class ChangeSetBuilderTest {
@@ -190,7 +194,37 @@ public void testModified2() {
// assertThat( cs.getChanges().get( 1 ), is( new ResourceChange(ChangeType.REMOVED, Type.RULE, "A removed rule") ) );
// assertThat( cs.getChanges().get( 2 ), is( new ResourceChange(ChangeType.UPDATED, Type.RULE, "An updated rule") ) );
}
+
+ @Test
+ public void testRuleRemoval() throws Exception {
+ String drl1 = "package org.drools.compiler\n" +
+ "rule R1 when\n" +
+ " $m : Message()\n" +
+ "then\n" +
+ "end\n";
+
+ String drl2 = "rule R2 when\n" +
+ " $m : Message( message == \"Hi Universe\" )\n" +
+ "then\n" +
+ "end\n";
+
+ String drl3 = "rule R3 when\n" +
+ " $m : Message( message == \"Hello World\" )\n" +
+ "then\n" +
+ "end\n";
+
+ InternalKieModule kieJar1 = createKieJar( drl1 + drl2 + drl3 );
+ InternalKieModule kieJar2 = createKieJar( drl1 + drl3 );
+ ChangeSetBuilder builder = new ChangeSetBuilder();
+ KieJarChangeSet changes = builder.build( kieJar1, kieJar2 );
+ assertEquals( 1, changes.getChanges().size() );
+
+ ResourceChangeSet rcs = changes.getChanges().values().iterator().next();
+ assertEquals( 1, rcs.getChanges().size() );
+ assertEquals( ChangeType.REMOVED, rcs.getChanges().get(0).getChangeType() );
+ }
+
private InternalKieModule createKieJar( String... drls) {
InternalKieModule kieJar = mock( InternalKieModule.class );
KieServices ks = KieServices.Factory.get();
diff --git a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/CompositeKnowledgeBuilderAdapter.java b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/CompositeKnowledgeBuilderAdapter.java
index 3c6f60d841f..245504d41d5 100644
--- a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/CompositeKnowledgeBuilderAdapter.java
+++ b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/CompositeKnowledgeBuilderAdapter.java
@@ -29,7 +29,7 @@ public org.drools.builder.CompositeKnowledgeBuilder add(Resource resource, Resou
}
public org.drools.builder.CompositeKnowledgeBuilder add(Resource resource, ResourceType type, ResourceConfiguration configuration) {
- delegate.add(((ResourceAdapter)resource).getDelegate(), type.toKieResourceType(), null);
+ delegate.add(((ResourceAdapter)resource).getDelegate(), type.toKieResourceType(), (org.kie.api.io.ResourceConfiguration) null);
return this;
}
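// ---------------------------------------------------------------------------
// Editorial sketch of the byte-range comparison introduced above in
// ChangeSetBuilder.segmentEquals(): two rule definitions count as unchanged
// when the source bytes between their start and end character positions are
// identical, avoiding a descriptor-level equals(). Illustration only; the
// class name and the main() driver are invented, not Drools code.
import java.nio.charset.StandardCharsets;

public class SegmentCompareSketch {

    // true when a1[s1..e1) and a2[s2..e2) contain the same bytes
    // (as in the patch, empty or negative-length segments are reported as not equal)
    static boolean segmentEquals(byte[] a1, int s1, int e1, byte[] a2, int s2, int e2) {
        int length = e1 - s1;
        if (length <= 0 || length != e2 - s2 || s1 + length > a1.length || s2 + length > a2.length) {
            return false;
        }
        for (int i = 0; i < length; i++) {
            if (a1[s1 + i] != a2[s2 + i]) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        byte[] oldSrc = "rule R1 when Message() then end".getBytes(StandardCharsets.UTF_8);
        byte[] newSrc = "rule R1 when Message() then end".getBytes(StandardCharsets.UTF_8);
        // identical rule text over the same range -> no UPDATED change would be reported
        System.out.println(segmentEquals(oldSrc, 0, oldSrc.length, newSrc, 0, newSrc.length)); // true
    }
}
// (end of editorial sketch)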
|
62f5e2a99d4f5c8bebf2b7ad581cae83ac437d0b
|
orientdb
|
Minor optimization in RidBag
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/record/ridbag/embedded/OEmbeddedRidBag.java b/core/src/main/java/com/orientechnologies/orient/core/db/record/ridbag/embedded/OEmbeddedRidBag.java
index 1de797ed631..a9ec16c94a0 100755
--- a/core/src/main/java/com/orientechnologies/orient/core/db/record/ridbag/embedded/OEmbeddedRidBag.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/record/ridbag/embedded/OEmbeddedRidBag.java
@@ -19,9 +19,20 @@
*/
package com.orientechnologies.orient.core.db.record.ridbag.embedded;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.NoSuchElementException;
+import java.util.Set;
+import java.util.UUID;
+import java.util.WeakHashMap;
+
import com.orientechnologies.common.serialization.types.OIntegerSerializer;
import com.orientechnologies.common.util.OResettable;
import com.orientechnologies.common.util.OSizeable;
+import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.db.record.OMultiValueChangeEvent;
import com.orientechnologies.orient.core.db.record.OMultiValueChangeListener;
@@ -30,16 +41,6 @@
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.OLinkSerializer;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.ListIterator;
-import java.util.NoSuchElementException;
-import java.util.Set;
-import java.util.UUID;
-import java.util.WeakHashMap;
-
public class OEmbeddedRidBag implements ORidBagDelegate {
private byte[] serializedContent = null;
@@ -110,9 +111,9 @@ public void remove() {
size--;
contentWasChanged = true;
- fireCollectionChangedEvent(new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(
- OMultiValueChangeEvent.OChangeType.REMOVE, nextValue, null, nextValue));
-
+ if (!changeListeners.isEmpty())
+ fireCollectionChangedEvent(new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(
+ OMultiValueChangeEvent.OChangeType.REMOVE, nextValue, null, nextValue));
}
@Override
@@ -168,8 +169,9 @@ public void add(OIdentifiable identifiable) {
size++;
contentWasChanged = true;
- fireCollectionChangedEvent(new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(OMultiValueChangeEvent.OChangeType.ADD,
- identifiable, identifiable));
+ if (!changeListeners.isEmpty())
+ fireCollectionChangedEvent(new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(OMultiValueChangeEvent.OChangeType.ADD,
+ identifiable, identifiable));
}
public OEmbeddedRidBag copy() {
@@ -194,8 +196,9 @@ public void remove(OIdentifiable identifiable) {
size--;
contentWasChanged = true;
- fireCollectionChangedEvent(new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(
- OMultiValueChangeEvent.OChangeType.REMOVE, identifiable, null, identifiable));
+ if (!changeListeners.isEmpty())
+ fireCollectionChangedEvent(new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(
+ OMultiValueChangeEvent.OChangeType.REMOVE, identifiable, null, identifiable));
}
}
@@ -382,7 +385,9 @@ public int serialize(byte[] stream, int offset, UUID ownerUuid) {
offset += OIntegerSerializer.INT_SIZE;
}
- for (Object entry : entries) {
+ final int totEntries = entries.length;
+ for (int i = 0; i < totEntries; ++i) {
+ final Object entry = entries[i];
if (entry instanceof OIdentifiable) {
OLinkSerializer.INSTANCE.serialize((OIdentifiable) entry, stream, offset);
offset += OLinkSerializer.RID_SIZE;
@@ -426,11 +431,12 @@ protected void fireCollectionChangedEvent(final OMultiValueChangeEvent<OIdentifi
}
}
- private void addEntry(OIdentifiable identifiable) {
+ private void addEntry(final OIdentifiable identifiable) {
if (entries.length == entriesLength) {
- if (entriesLength == 0)
- entries = new Object[4];
- else {
+ if (entriesLength == 0) {
+ final int cfgValue = OGlobalConfiguration.RID_BAG_EMBEDDED_TO_SBTREEBONSAI_THRESHOLD.getValueAsInteger();
+ entries = new Object[cfgValue > 0 ? Math.min(cfgValue, 40) : 40];
+ } else {
final Object[] oldEntries = entries;
entries = new Object[entries.length << 1];
System.arraycopy(oldEntries, 0, entries, 0, oldEntries.length);
diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/record/ridbag/sbtree/OSBTreeRidBag.java b/core/src/main/java/com/orientechnologies/orient/core/db/record/ridbag/sbtree/OSBTreeRidBag.java
index 7b098fa1a8f..a2d3e1a79fc 100755
--- a/core/src/main/java/com/orientechnologies/orient/core/db/record/ridbag/sbtree/OSBTreeRidBag.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/db/record/ridbag/sbtree/OSBTreeRidBag.java
@@ -367,7 +367,7 @@ public void remove() {
}
}
- if (updateOwner)
+ if (updateOwner && !changeListeners.isEmpty())
fireCollectionChangedEvent(new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(
OMultiValueChangeEvent.OChangeType.REMOVE, currentValue, null, currentValue, false));
currentRemoved = true;
@@ -614,7 +614,7 @@ public void addAll(Collection<OIdentifiable> values) {
}
}
- public void add(OIdentifiable identifiable) {
+ public void add(final OIdentifiable identifiable) {
if (identifiable.getIdentity().isValid()) {
Change counter = changes.get(identifiable);
if (counter == null)
@@ -627,7 +627,7 @@ public void add(OIdentifiable identifiable) {
counter.increment();
}
} else {
- OModifiableInteger counter = newEntries.get(identifiable);
+ final OModifiableInteger counter = newEntries.get(identifiable);
if (counter == null)
newEntries.put(identifiable, new OModifiableInteger(1));
else
@@ -637,7 +637,7 @@ public void add(OIdentifiable identifiable) {
if (size >= 0)
size++;
- if (updateOwner)
+ if (updateOwner && !changeListeners.isEmpty())
fireCollectionChangedEvent(new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(OMultiValueChangeEvent.OChangeType.ADD,
identifiable, identifiable, null, false));
}
@@ -667,7 +667,7 @@ public void remove(OIdentifiable identifiable) {
}
}
- if (updateOwner)
+ if (updateOwner && !changeListeners.isEmpty())
fireCollectionChangedEvent(new OMultiValueChangeEvent<OIdentifiable, OIdentifiable>(
OMultiValueChangeEvent.OChangeType.REMOVE, identifiable, null, identifiable, false));
}
@@ -868,15 +868,15 @@ public OBonsaiCollectionPointer getCollectionPointer() {
return collectionPointer;
}
+ public void setCollectionPointer(OBonsaiCollectionPointer collectionPointer) {
+ this.collectionPointer = collectionPointer;
+ }
+
@Override
public Set<OMultiValueChangeListener<OIdentifiable, OIdentifiable>> getChangeListeners() {
return Collections.unmodifiableSet(changeListeners);
}
- public void setCollectionPointer(OBonsaiCollectionPointer collectionPointer) {
- this.collectionPointer = collectionPointer;
- }
-
protected void fireCollectionChangedEvent(final OMultiValueChangeEvent<OIdentifiable, OIdentifiable> event) {
for (final OMultiValueChangeListener<OIdentifiable, OIdentifiable> changeListener : changeListeners) {
if (changeListener != null)
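// ---------------------------------------------------------------------------
// Editorial sketch of the micro-optimization applied above: the change-event
// object is only built when at least one listener is registered, so the common
// listener-free path allocates nothing. Names below are invented for the
// example, not OrientDB classes.
import java.util.ArrayList;
import java.util.List;

public class ListenerGuardSketch {

    interface ChangeListener { void changed(String event); }

    private final List<ChangeListener> changeListeners = new ArrayList<>();

    void addListener(ChangeListener l) { changeListeners.add(l); }

    void add(String value) {
        // ... mutate the underlying collection here ...
        if (!changeListeners.isEmpty()) {
            // the event is only created when someone can observe it
            String event = "ADD:" + value;
            for (ChangeListener l : changeListeners) {
                l.changed(event);
            }
        }
    }

    public static void main(String[] args) {
        ListenerGuardSketch bag = new ListenerGuardSketch();
        bag.add("#10:1");                           // no listeners: no event allocation
        bag.addListener(e -> System.out.println(e));
        bag.add("#10:2");                           // prints ADD:#10:2
    }
}
// (end of editorial sketch)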
|
290343ed0276ade6737d6b2e7a2b701c3a70ce77
|
kotlin
|
refactoring
|
p
|
https://github.com/JetBrains/kotlin
|
diff --git a/compiler/backend/src/org/jetbrains/jet/codegen/state/JetTypeMapper.java b/compiler/backend/src/org/jetbrains/jet/codegen/state/JetTypeMapper.java
index 8720b44a3f13d..32e489b5b6965 100644
--- a/compiler/backend/src/org/jetbrains/jet/codegen/state/JetTypeMapper.java
+++ b/compiler/backend/src/org/jetbrains/jet/codegen/state/JetTypeMapper.java
@@ -776,10 +776,11 @@ private JvmMethodSignature mapConstructorSignature(ConstructorDescriptor descrip
if (closure != null) {
for (Map.Entry<DeclarationDescriptor, EnclosedValueDescriptor> entry : closure.getCaptureVariables().entrySet()) {
- if (entry.getKey() instanceof VariableDescriptor && !(entry.getKey() instanceof PropertyDescriptor)) {
- Type sharedVarType = getSharedVarType(entry.getKey());
+ DeclarationDescriptor variableDescriptor = entry.getKey();
+ if (variableDescriptor instanceof VariableDescriptor && !(variableDescriptor instanceof PropertyDescriptor)) {
+ Type sharedVarType = getSharedVarType(variableDescriptor);
if (sharedVarType == null) {
- sharedVarType = mapType(((VariableDescriptor) entry.getKey()).getType());
+ sharedVarType = mapType(((VariableDescriptor) variableDescriptor).getType());
}
signatureWriter.writeParameterType(JvmMethodParameterKind.SHARED_VAR);
signatureWriter.writeAsmType(sharedVarType, false);
|
384542ccb54c28d73d9f368f2375ef60d99127ac
|
restlet-framework-java
|
- Fixed error in Conditions.getStatus() sometimes returning 304 for methods other than HEAD and GET. Contributed by Stephan Koops.
|
c
|
https://github.com/restlet/restlet-framework-java
|
diff --git a/build/tmpl/text/changes.txt b/build/tmpl/text/changes.txt
index e78b997580..783366ca6f 100644
--- a/build/tmpl/text/changes.txt
+++ b/build/tmpl/text/changes.txt
@@ -33,6 +33,9 @@ Changes log
the parent resource has no 'id' attribute. Also supports
root resources with no leading slash. Patches contributed
by Vincent Ricard.
+ - Fixed error in Conditions.getStatus() sometimes returning
+ 304 for methods other than HEAD and GET. Contributed by
+ Stephan Koops.
- Misc
- Updated db4o to version 7.4.58.
- Updated FreeMarker to version 2.3.14.
diff --git a/modules/org.restlet/src/org/restlet/data/Conditions.java b/modules/org.restlet/src/org/restlet/data/Conditions.java
index 0c717a46ff..d862e06a5e 100644
--- a/modules/org.restlet/src/org/restlet/data/Conditions.java
+++ b/modules/org.restlet/src/org/restlet/data/Conditions.java
@@ -233,7 +233,11 @@ public Status getStatus(Method method, Representation representation) {
.getModificationDate()));
if (!isModifiedSince) {
- result = Status.REDIRECTION_NOT_MODIFIED;
+ if (Method.GET.equals(method) || Method.HEAD.equals(method)) {
+ result = Status.REDIRECTION_NOT_MODIFIED;
+ } else {
+ result = Status.CLIENT_ERROR_PRECONDITION_FAILED;
+ }
}
}
}
diff --git a/modules/org.restlet/src/org/restlet/data/Request.java b/modules/org.restlet/src/org/restlet/data/Request.java
index cc2e0ed6ad..9c81cbd5d5 100644
--- a/modules/org.restlet/src/org/restlet/data/Request.java
+++ b/modules/org.restlet/src/org/restlet/data/Request.java
@@ -509,9 +509,11 @@ public void setOriginalRef(Reference originalRef) {
/**
* Sets the ranges to return from the target resource's representation.
*
+ * @param ranges
+ * The ranges.
*/
- public void setRanges(List<Range> range) {
- this.ranges = range;
+ public void setRanges(List<Range> ranges) {
+ this.ranges = ranges;
}
/**
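// ---------------------------------------------------------------------------
// Editorial sketch of the status selection the Conditions.getStatus() fix
// above implements: when the representation has not been modified since the
// If-Modified-Since date, GET and HEAD get 304 (Not Modified) while other
// methods get 412 (Precondition Failed). Plain strings and ints stand in for
// Restlet's Method and Status types; this is not the Restlet API.
public class ConditionalStatusSketch {

    static int statusForUnmodified(String method) {
        if ("GET".equals(method) || "HEAD".equals(method)) {
            return 304; // Not Modified: the client can keep using its cached copy
        }
        return 412;     // Precondition Failed: a 304 makes no sense for PUT, DELETE, etc.
    }

    public static void main(String[] args) {
        System.out.println(statusForUnmodified("GET")); // 304
        System.out.println(statusForUnmodified("PUT")); // 412
    }
}
// (end of editorial sketch)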
|
501a1cbb5d35609dcd97a5ce44ac60739173e479
|
spring-framework
|
Refactor from deprecated GenericTypeResolver calls

Refactor AbstractMessageConverterMethodArgumentResolver and BridgeMethodResolver to use ResolvableType in preference to deprecated GenericTypeResolver calls.

Issue: SPR-10980
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-core/src/main/java/org/springframework/core/BridgeMethodResolver.java b/spring-core/src/main/java/org/springframework/core/BridgeMethodResolver.java
index 1027d4dfc9dc..d0cf60aa8388 100644
--- a/spring-core/src/main/java/org/springframework/core/BridgeMethodResolver.java
+++ b/spring-core/src/main/java/org/springframework/core/BridgeMethodResolver.java
@@ -16,14 +16,11 @@
package org.springframework.core;
-import java.lang.reflect.GenericArrayType;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
-import java.lang.reflect.TypeVariable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import java.util.Map;
import org.springframework.util.ClassUtils;
import org.springframework.util.ReflectionUtils;
@@ -44,6 +41,7 @@
*
* @author Rob Harrop
* @author Juergen Hoeller
+ * @author Phillip Webb
* @since 2.0
*/
public abstract class BridgeMethodResolver {
@@ -86,6 +84,18 @@ public static Method findBridgedMethod(Method bridgeMethod) {
}
}
+ /**
+ * Returns {@code true} if the supplied '{@code candidateMethod}' can be
+ * consider a validate candidate for the {@link Method} that is {@link Method#isBridge() bridged}
+ * by the supplied {@link Method bridge Method}. This method performs inexpensive
+ * checks and can be used quickly filter for a set of possible matches.
+ */
+ private static boolean isBridgedCandidateFor(Method candidateMethod, Method bridgeMethod) {
+ return (!candidateMethod.isBridge() && !candidateMethod.equals(bridgeMethod) &&
+ candidateMethod.getName().equals(bridgeMethod.getName()) &&
+ candidateMethod.getParameterTypes().length == bridgeMethod.getParameterTypes().length);
+ }
+
/**
* Searches for the bridged method in the given candidates.
* @param candidateMethods the List of candidate Methods
@@ -96,11 +106,10 @@ private static Method searchCandidates(List<Method> candidateMethods, Method bri
if (candidateMethods.isEmpty()) {
return null;
}
- Map<TypeVariable, Type> typeParameterMap = GenericTypeResolver.getTypeVariableMap(bridgeMethod.getDeclaringClass());
Method previousMethod = null;
boolean sameSig = true;
for (Method candidateMethod : candidateMethods) {
- if (isBridgeMethodFor(bridgeMethod, candidateMethod, typeParameterMap)) {
+ if (isBridgeMethodFor(bridgeMethod, candidateMethod, bridgeMethod.getDeclaringClass())) {
return candidateMethod;
}
else if (previousMethod != null) {
@@ -112,28 +121,16 @@ else if (previousMethod != null) {
return (sameSig ? candidateMethods.get(0) : null);
}
- /**
- * Returns {@code true} if the supplied '{@code candidateMethod}' can be
- * consider a validate candidate for the {@link Method} that is {@link Method#isBridge() bridged}
- * by the supplied {@link Method bridge Method}. This method performs inexpensive
- * checks and can be used quickly filter for a set of possible matches.
- */
- private static boolean isBridgedCandidateFor(Method candidateMethod, Method bridgeMethod) {
- return (!candidateMethod.isBridge() && !candidateMethod.equals(bridgeMethod) &&
- candidateMethod.getName().equals(bridgeMethod.getName()) &&
- candidateMethod.getParameterTypes().length == bridgeMethod.getParameterTypes().length);
- }
-
/**
* Determines whether or not the bridge {@link Method} is the bridge for the
* supplied candidate {@link Method}.
*/
- static boolean isBridgeMethodFor(Method bridgeMethod, Method candidateMethod, Map<TypeVariable, Type> typeVariableMap) {
- if (isResolvedTypeMatch(candidateMethod, bridgeMethod, typeVariableMap)) {
+ static boolean isBridgeMethodFor(Method bridgeMethod, Method candidateMethod, Class<?> declaringClass) {
+ if (isResolvedTypeMatch(candidateMethod, bridgeMethod, declaringClass)) {
return true;
}
Method method = findGenericDeclaration(bridgeMethod);
- return (method != null && isResolvedTypeMatch(method, candidateMethod, typeVariableMap));
+ return (method != null && isResolvedTypeMatch(method, candidateMethod, declaringClass));
}
/**
@@ -167,34 +164,27 @@ private static Method findGenericDeclaration(Method bridgeMethod) {
/**
* Returns {@code true} if the {@link Type} signature of both the supplied
* {@link Method#getGenericParameterTypes() generic Method} and concrete {@link Method}
- * are equal after resolving all {@link TypeVariable TypeVariables} using the supplied
- * TypeVariable Map, otherwise returns {@code false}.
+ * are equal after resolving all types against the declaringType, otherwise
+ * returns {@code false}.
*/
private static boolean isResolvedTypeMatch(
- Method genericMethod, Method candidateMethod, Map<TypeVariable, Type> typeVariableMap) {
-
+ Method genericMethod, Method candidateMethod, Class<?> declaringClass) {
Type[] genericParameters = genericMethod.getGenericParameterTypes();
Class[] candidateParameters = candidateMethod.getParameterTypes();
if (genericParameters.length != candidateParameters.length) {
return false;
}
- for (int i = 0; i < genericParameters.length; i++) {
- Type genericParameter = genericParameters[i];
+ for (int i = 0; i < candidateParameters.length; i++) {
+ ResolvableType genericParameter = ResolvableType.forMethodParameter(genericMethod, i, declaringClass);
Class candidateParameter = candidateParameters[i];
if (candidateParameter.isArray()) {
// An array type: compare the component type.
- Type rawType = GenericTypeResolver.getRawType(genericParameter, typeVariableMap);
- if (rawType instanceof GenericArrayType) {
- if (!candidateParameter.getComponentType().equals(
- GenericTypeResolver.resolveType(((GenericArrayType) rawType).getGenericComponentType(), typeVariableMap))) {
- return false;
- }
- break;
+ if (!candidateParameter.getComponentType().equals(genericParameter.getComponentType().resolve(Object.class))) {
+ return false;
}
}
// A non-array type: compare the type itself.
- Class resolvedParameter = GenericTypeResolver.resolveType(genericParameter, typeVariableMap);
- if (!candidateParameter.equals(resolvedParameter)) {
+ if (!candidateParameter.equals(genericParameter.resolve(Object.class))) {
return false;
}
}
diff --git a/spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java b/spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java
index dbca8b7b5b3f..86ebb9b791e8 100644
--- a/spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java
+++ b/spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java
@@ -31,11 +31,11 @@
import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
-import static org.junit.Assert.*;
import org.junit.Test;
-
import org.springframework.util.ReflectionUtils;
+import static org.junit.Assert.*;
+
/**
* @author Rob Harrop
* @author Juergen Hoeller
@@ -99,16 +99,16 @@ public void testFindBridgedMethodInHierarchy() throws Exception {
@Test
public void testIsBridgeMethodFor() throws Exception {
- Map<TypeVariable, Type> typeParameterMap = GenericTypeResolver.getTypeVariableMap(MyBar.class);
Method bridged = MyBar.class.getDeclaredMethod("someMethod", String.class, Object.class);
Method other = MyBar.class.getDeclaredMethod("someMethod", Integer.class, Object.class);
Method bridge = MyBar.class.getDeclaredMethod("someMethod", Object.class, Object.class);
- assertTrue("Should be bridge method", BridgeMethodResolver.isBridgeMethodFor(bridge, bridged, typeParameterMap));
- assertFalse("Should not be bridge method", BridgeMethodResolver.isBridgeMethodFor(bridge, other, typeParameterMap));
+ assertTrue("Should be bridge method", BridgeMethodResolver.isBridgeMethodFor(bridge, bridged, MyBar.class));
+ assertFalse("Should not be bridge method", BridgeMethodResolver.isBridgeMethodFor(bridge, other, MyBar.class));
}
@Test
+ @Deprecated
public void testCreateTypeVariableMap() throws Exception {
Map<TypeVariable, Type> typeVariableMap = GenericTypeResolver.getTypeVariableMap(MyBar.class);
TypeVariable<?> barT = findTypeVariable(InterBar.class, "T");
@@ -220,14 +220,14 @@ public void testSPR2583() throws Exception {
Method otherMethod = MessageBroadcasterImpl.class.getMethod("receive", NewMessageEvent.class);
assertFalse(otherMethod.isBridge());
- Map<TypeVariable, Type> typeVariableMap = GenericTypeResolver.getTypeVariableMap(MessageBroadcasterImpl.class);
- assertFalse("Match identified incorrectly", BridgeMethodResolver.isBridgeMethodFor(bridgeMethod, otherMethod, typeVariableMap));
- assertTrue("Match not found correctly", BridgeMethodResolver.isBridgeMethodFor(bridgeMethod, bridgedMethod, typeVariableMap));
+ assertFalse("Match identified incorrectly", BridgeMethodResolver.isBridgeMethodFor(bridgeMethod, otherMethod, MessageBroadcasterImpl.class));
+ assertTrue("Match not found correctly", BridgeMethodResolver.isBridgeMethodFor(bridgeMethod, bridgedMethod, MessageBroadcasterImpl.class));
assertEquals(bridgedMethod, BridgeMethodResolver.findBridgedMethod(bridgeMethod));
}
@Test
+ @Deprecated
public void testSPR2454() throws Exception {
Map<TypeVariable, Type> typeVariableMap = GenericTypeResolver.getTypeVariableMap(YourHomer.class);
TypeVariable<?> variable = findTypeVariable(MyHomer.class, "L");
@@ -768,6 +768,7 @@ public class GenericBroadcasterImpl implements Broadcaster {
}
+ @SuppressWarnings({ "unused", "unchecked" })
public abstract class GenericEventBroadcasterImpl<T extends Event> extends GenericBroadcasterImpl
implements EventBroadcaster {
@@ -835,6 +836,7 @@ public class ModifiedMessageEvent extends MessageEvent {
}
+ @SuppressWarnings("unchecked")
public class MessageBroadcasterImpl extends GenericEventBroadcasterImpl<MessageEvent>
implements MessageBroadcaster {
@@ -889,6 +891,7 @@ public interface RepositoryRegistry {
}
+ @SuppressWarnings("unchecked")
public class SettableRepositoryRegistry<R extends SimpleGenericRepository<?>>
implements RepositoryRegistry {
diff --git a/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/AbstractMessageConverterMethodArgumentResolver.java b/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/AbstractMessageConverterMethodArgumentResolver.java
index 14c74c607713..9874fb38a170 100644
--- a/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/AbstractMessageConverterMethodArgumentResolver.java
+++ b/spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/AbstractMessageConverterMethodArgumentResolver.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2012 the original author or authors.
+ * Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,20 +18,18 @@
import java.io.IOException;
import java.lang.reflect.Type;
-import java.lang.reflect.TypeVariable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
-import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.springframework.core.GenericTypeResolver;
import org.springframework.core.MethodParameter;
+import org.springframework.core.ResolvableType;
import org.springframework.http.HttpInputMessage;
import org.springframework.http.MediaType;
import org.springframework.http.converter.GenericHttpMessageConverter;
@@ -121,8 +119,8 @@ protected <T> Object readWithMessageConverters(HttpInputMessage inputMessage,
}
Class<?> contextClass = methodParam.getDeclaringClass();
- Map<TypeVariable, Type> map = GenericTypeResolver.getTypeVariableMap(contextClass);
- Class<T> targetClass = (Class<T>) GenericTypeResolver.resolveType(targetType, map);
+ Class<T> targetClass = (Class<T>) ResolvableType.forType(targetType,
+ ResolvableType.forMethodParameter(methodParam)).resolve();
for (HttpMessageConverter<?> converter : this.messageConverters) {
if (converter instanceof GenericHttpMessageConverter) {
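// ---------------------------------------------------------------------------
// Editorial sketch of the ResolvableType-based resolution the refactoring
// above relies on: a generic method parameter declared on a generic base class
// is resolved against a concrete subclass. The GenericDao/StringDao types are
// invented for the example; only the ResolvableType call mirrors the patch.
import java.lang.reflect.Method;
import org.springframework.core.ResolvableType;

public class ResolvableTypeSketch {

    static class GenericDao<T> {
        public void save(T entity) { }
    }

    static class StringDao extends GenericDao<String> { }

    public static void main(String[] args) throws Exception {
        Method save = GenericDao.class.getMethod("save", Object.class);
        // resolve the T parameter of save(T) in the context of StringDao
        ResolvableType param = ResolvableType.forMethodParameter(save, 0, StringDao.class);
        System.out.println(param.resolve()); // class java.lang.String
    }
}
// (end of editorial sketch)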
|
367a2b8bf66addf6fd731c54b8436ffdd8ed9061
|
hadoop
|
HDFS-2465. Add HDFS support for fadvise readahead and drop-behind. Contributed by Todd Lipcon.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1190625 13f79535-47bb-0310-9956-ffa450edef68
|
a
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 08a78a07e6a31..8bd74374ac678 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -772,6 +772,8 @@ Release 0.23.0 - Unreleased
HDFS-2500. Avoid file system operations in BPOfferService thread while
processing deletes. (todd)
+ HDFS-2465. Add HDFS support for fadvise readahead and drop-behind. (todd)
+
BUG FIXES
HDFS-2344. Fix the TestOfflineEditsViewer test failure in 0.23 branch.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 9c53bc08796cd..6c10d0e8473bf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -54,6 +54,15 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
public static final String DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY = "dfs.namenode.backup.dnrpc-address";
public static final String DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_KEY = "dfs.datanode.balance.bandwidthPerSec";
public static final long DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_DEFAULT = 1024*1024;
+ public static final String DFS_DATANODE_READAHEAD_BYTES_KEY = "dfs.datanode.readahead.bytes";
+ public static final long DFS_DATANODE_READAHEAD_BYTES_DEFAULT = 0;
+ public static final String DFS_DATANODE_DROP_CACHE_BEHIND_WRITES_KEY = "dfs.datanode.drop.cache.behind.writes";
+ public static final boolean DFS_DATANODE_DROP_CACHE_BEHIND_WRITES_DEFAULT = false;
+ public static final String DFS_DATANODE_SYNC_BEHIND_WRITES_KEY = "dfs.datanode.sync.behind.writes";
+ public static final boolean DFS_DATANODE_SYNC_BEHIND_WRITES_DEFAULT = false;
+ public static final String DFS_DATANODE_DROP_CACHE_BEHIND_READS_KEY = "dfs.datanode.drop.cache.behind.reads";
+ public static final boolean DFS_DATANODE_DROP_CACHE_BEHIND_READS_DEFAULT = false;
+
public static final String DFS_NAMENODE_HTTP_ADDRESS_KEY = "dfs.namenode.http-address";
public static final String DFS_NAMENODE_HTTP_ADDRESS_DEFAULT = "0.0.0.0:50070";
public static final String DFS_NAMENODE_RPC_ADDRESS_KEY = "dfs.namenode.rpc-address";
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
index 50e118aaa0093..b935aafd412fb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java
@@ -24,6 +24,7 @@
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
+import java.io.FileDescriptor;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
@@ -46,6 +47,7 @@
import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
import org.apache.hadoop.hdfs.util.DataTransferThrottler;
import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.DataChecksum;
import org.apache.hadoop.util.PureJavaCrc32;
@@ -57,10 +59,13 @@
class BlockReceiver implements Closeable {
public static final Log LOG = DataNode.LOG;
static final Log ClientTraceLog = DataNode.ClientTraceLog;
+
+ private static final long CACHE_DROP_LAG_BYTES = 8 * 1024 * 1024;
private DataInputStream in = null; // from where data are read
private DataChecksum checksum; // from where chunks of a block can be read
private OutputStream out = null; // to block file at local disk
+ private FileDescriptor outFd;
private OutputStream cout = null; // output stream for cehcksum file
private DataOutputStream checksumOut = null; // to crc file at local disk
private int bytesPerChecksum;
@@ -80,6 +85,11 @@ class BlockReceiver implements Closeable {
private final DataNode datanode;
volatile private boolean mirrorError;
+ // Cache management state
+ private boolean dropCacheBehindWrites;
+ private boolean syncBehindWrites;
+ private long lastCacheDropOffset = 0;
+
/** The client name. It is empty if a datanode is the client */
private final String clientname;
private final boolean isClient;
@@ -170,6 +180,8 @@ class BlockReceiver implements Closeable {
this.checksum = DataChecksum.newDataChecksum(in);
this.bytesPerChecksum = checksum.getBytesPerChecksum();
this.checksumSize = checksum.getChecksumSize();
+ this.dropCacheBehindWrites = datanode.shouldDropCacheBehindWrites();
+ this.syncBehindWrites = datanode.shouldSyncBehindWrites();
final boolean isCreate = isDatanode || isTransfer
|| stage == BlockConstructionStage.PIPELINE_SETUP_CREATE;
@@ -177,6 +189,12 @@ class BlockReceiver implements Closeable {
this.bytesPerChecksum, this.checksumSize);
if (streams != null) {
this.out = streams.dataOut;
+ if (out instanceof FileOutputStream) {
+ this.outFd = ((FileOutputStream)out).getFD();
+ } else {
+ LOG.warn("Could not get file descriptor for outputstream of class " +
+ out.getClass());
+ }
this.cout = streams.checksumOut;
this.checksumOut = new DataOutputStream(new BufferedOutputStream(
streams.checksumOut, HdfsConstants.SMALL_BUFFER_SIZE));
@@ -631,6 +649,8 @@ private int receivePacket(long offsetInBlock, long seqno,
);
datanode.metrics.incrBytesWritten(len);
+
+ dropOsCacheBehindWriter(offsetInBlock);
}
} catch (IOException iex) {
datanode.checkDiskError(iex);
@@ -645,6 +665,28 @@ private int receivePacket(long offsetInBlock, long seqno,
return lastPacketInBlock?-1:len;
}
+ private void dropOsCacheBehindWriter(long offsetInBlock) throws IOException {
+ try {
+ if (outFd != null &&
+ offsetInBlock > lastCacheDropOffset + CACHE_DROP_LAG_BYTES) {
+ long twoWindowsAgo = lastCacheDropOffset - CACHE_DROP_LAG_BYTES;
+ if (twoWindowsAgo > 0 && dropCacheBehindWrites) {
+ NativeIO.posixFadviseIfPossible(outFd, 0, lastCacheDropOffset,
+ NativeIO.POSIX_FADV_DONTNEED);
+ }
+
+ if (syncBehindWrites) {
+ NativeIO.syncFileRangeIfPossible(outFd, lastCacheDropOffset, CACHE_DROP_LAG_BYTES,
+ NativeIO.SYNC_FILE_RANGE_WRITE);
+ }
+
+ lastCacheDropOffset += CACHE_DROP_LAG_BYTES;
+ }
+ } catch (Throwable t) {
+ LOG.warn("Couldn't drop os cache behind writer for " + block, t);
+ }
+ }
+
void writeChecksumHeader(DataOutputStream mirrorOut) throws IOException {
checksum.writeHeader(mirrorOut);
}
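// ---------------------------------------------------------------------------
// Editorial sketch of the drop-behind bookkeeping used by the BlockReceiver
// change above (and by BlockSender.manageOsCache() below): as the write/read
// offset advances, pages more than one window behind are advised out of the
// OS page cache in fixed-size chunks. dropFromCache() stands in for
// NativeIO.posixFadviseIfPossible(..., POSIX_FADV_DONTNEED); everything else
// is invented for the example.
public class DropBehindSketch {

    static final long DROP_INTERVAL = 1024 * 1024; // drop in 1 MB chunks

    long lastCacheDropOffset = 0;

    void onOffsetAdvanced(long offset) {
        long nextCacheDropOffset = lastCacheDropOffset + DROP_INTERVAL;
        if (offset >= nextCacheDropOffset) {
            // everything in [lastCacheDropOffset, offset) has been consumed and is
            // unlikely to be needed again, so it can be evicted from the page cache
            dropFromCache(lastCacheDropOffset, offset - lastCacheDropOffset);
            lastCacheDropOffset += DROP_INTERVAL;
        }
    }

    void dropFromCache(long dropOffset, long dropLength) {
        // stand-in for the native fadvise(POSIX_FADV_DONTNEED) call
        System.out.println("drop [" + dropOffset + ", " + (dropOffset + dropLength) + ")");
    }

    public static void main(String[] args) {
        DropBehindSketch s = new DropBehindSketch();
        for (long offset = 0; offset <= 4 * DROP_INTERVAL; offset += 256 * 1024) {
            s.onOffsetAdvanced(offset);
        }
    }
}
// (end of editorial sketch)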
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java
index 84b38b37e9a14..ca9765ce3ea0f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java
@@ -20,6 +20,7 @@
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
+import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
@@ -36,6 +37,9 @@
import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
import org.apache.hadoop.hdfs.util.DataTransferThrottler;
import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.ReadaheadPool;
+import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
+import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.net.SocketOutputStream;
import org.apache.hadoop.util.DataChecksum;
@@ -118,7 +122,9 @@ class BlockSender implements java.io.Closeable {
private DataInputStream checksumIn;
/** Checksum utility */
private final DataChecksum checksum;
- /** Starting position to read */
+ /** Initial position to read */
+ private long initialOffset;
+ /** Current position of read */
private long offset;
/** Position of last byte to read from block file */
private final long endOffset;
@@ -142,6 +148,24 @@ class BlockSender implements java.io.Closeable {
private final String clientTraceFmt;
private volatile ChunkChecksum lastChunkChecksum = null;
+ /** The file descriptor of the block being sent */
+ private FileDescriptor blockInFd;
+
+ // Cache-management related fields
+ private final long readaheadLength;
+ private boolean shouldDropCacheBehindRead;
+ private ReadaheadRequest curReadahead;
+ private long lastCacheDropOffset;
+ private static final long CACHE_DROP_INTERVAL_BYTES = 1024 * 1024; // 1MB
+ /**
+ * Minimum length of read below which management of the OS
+ * buffer cache is disabled.
+ */
+ private static final long LONG_READ_THRESHOLD_BYTES = 256 * 1024;
+
+ private static ReadaheadPool readaheadPool =
+ ReadaheadPool.getInstance();
+
/**
* Constructor
*
@@ -165,6 +189,8 @@ class BlockSender implements java.io.Closeable {
this.corruptChecksumOk = corruptChecksumOk;
this.verifyChecksum = verifyChecksum;
this.clientTraceFmt = clientTraceFmt;
+ this.readaheadLength = datanode.getReadaheadLength();
+ this.shouldDropCacheBehindRead = datanode.shouldDropCacheBehindReads();
synchronized(datanode.data) {
this.replica = getReplica(block, datanode);
@@ -277,6 +303,11 @@ class BlockSender implements java.io.Closeable {
DataNode.LOG.debug("replica=" + replica);
}
blockIn = datanode.data.getBlockInputStream(block, offset); // seek to offset
+ if (blockIn instanceof FileInputStream) {
+ blockInFd = ((FileInputStream)blockIn).getFD();
+ } else {
+ blockInFd = null;
+ }
} catch (IOException ioe) {
IOUtils.closeStream(this);
IOUtils.closeStream(blockIn);
@@ -288,6 +319,20 @@ class BlockSender implements java.io.Closeable {
* close opened files.
*/
public void close() throws IOException {
+ if (blockInFd != null && shouldDropCacheBehindRead) {
+ // drop the last few MB of the file from cache
+ try {
+ NativeIO.posixFadviseIfPossible(
+ blockInFd, lastCacheDropOffset, offset - lastCacheDropOffset,
+ NativeIO.POSIX_FADV_DONTNEED);
+ } catch (Exception e) {
+ LOG.warn("Unable to drop cache on file close", e);
+ }
+ }
+ if (curReadahead != null) {
+ curReadahead.cancel();
+ }
+
IOException ioe = null;
if(checksumIn!=null) {
try {
@@ -304,6 +349,7 @@ public void close() throws IOException {
ioe = e;
}
blockIn = null;
+ blockInFd = null;
}
// throw IOException if there is any
if(ioe!= null) {
@@ -538,10 +584,20 @@ long sendBlock(DataOutputStream out, OutputStream baseStream,
if (out == null) {
throw new IOException( "out stream is null" );
}
- final long initialOffset = offset;
+ initialOffset = offset;
long totalRead = 0;
OutputStream streamForSendChunks = out;
+ lastCacheDropOffset = initialOffset;
+
+ if (isLongRead() && blockInFd != null) {
+ // Advise that this file descriptor will be accessed sequentially.
+ NativeIO.posixFadviseIfPossible(blockInFd, 0, 0, NativeIO.POSIX_FADV_SEQUENTIAL);
+ }
+
+ // Trigger readahead of beginning of file if configured.
+ manageOsCache();
+
final long startTime = ClientTraceLog.isInfoEnabled() ? System.nanoTime() : 0;
try {
writeChecksumHeader(out);
@@ -569,6 +625,7 @@ long sendBlock(DataOutputStream out, OutputStream baseStream,
ByteBuffer pktBuf = ByteBuffer.allocate(pktSize);
while (endOffset > offset) {
+ manageOsCache();
long len = sendPacket(pktBuf, maxChunksPerPacket, streamForSendChunks,
transferTo, throttler);
offset += len;
@@ -595,6 +652,45 @@ long sendBlock(DataOutputStream out, OutputStream baseStream,
}
return totalRead;
}
+
+ /**
+ * Manage the OS buffer cache by performing read-ahead
+ * and drop-behind.
+ */
+ private void manageOsCache() throws IOException {
+ if (!isLongRead() || blockInFd == null) {
+ // don't manage cache manually for short-reads, like
+ // HBase random read workloads.
+ return;
+ }
+
+ // Perform readahead if necessary
+ if (readaheadLength > 0 && readaheadPool != null) {
+ curReadahead = readaheadPool.readaheadStream(
+ clientTraceFmt, blockInFd,
+ offset, readaheadLength, Long.MAX_VALUE,
+ curReadahead);
+ }
+
+ // Drop what we've just read from cache, since we aren't
+ // likely to need it again
+ long nextCacheDropOffset = lastCacheDropOffset + CACHE_DROP_INTERVAL_BYTES;
+ if (shouldDropCacheBehindRead &&
+ offset >= nextCacheDropOffset) {
+ long dropLength = offset - lastCacheDropOffset;
+ if (dropLength >= 1024) {
+ NativeIO.posixFadviseIfPossible(blockInFd,
+ lastCacheDropOffset, dropLength,
+ NativeIO.POSIX_FADV_DONTNEED);
+ }
+ lastCacheDropOffset += CACHE_DROP_INTERVAL_BYTES;
+ }
+ }
+
+ private boolean isLongRead() {
+ return (endOffset - offset) > LONG_READ_THRESHOLD_BYTES;
+ }
+
/**
* Write checksum header to the output stream
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 3f7733608999c..5be82dd59d8cf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -104,6 +104,7 @@
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HDFSPolicyProvider;
import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -410,6 +411,11 @@ void refreshNamenodes(Configuration conf)
int socketTimeout;
int socketWriteTimeout = 0;
boolean transferToAllowed = true;
+ private boolean dropCacheBehindWrites = false;
+ private boolean syncBehindWrites = false;
+ private boolean dropCacheBehindReads = false;
+ private long readaheadLength = 0;
+
int writePacketSize = 0;
boolean isBlockTokenEnabled;
BlockPoolTokenSecretManager blockPoolTokenSecretManager;
@@ -493,6 +499,20 @@ private void initConfig(Configuration conf) {
DFS_DATANODE_TRANSFERTO_ALLOWED_DEFAULT);
this.writePacketSize = conf.getInt(DFS_CLIENT_WRITE_PACKET_SIZE_KEY,
DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT);
+
+ this.readaheadLength = conf.getLong(
+ DFSConfigKeys.DFS_DATANODE_READAHEAD_BYTES_KEY,
+ DFSConfigKeys.DFS_DATANODE_READAHEAD_BYTES_DEFAULT);
+ this.dropCacheBehindWrites = conf.getBoolean(
+ DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_WRITES_KEY,
+ DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_WRITES_DEFAULT);
+ this.syncBehindWrites = conf.getBoolean(
+ DFSConfigKeys.DFS_DATANODE_SYNC_BEHIND_WRITES_KEY,
+ DFSConfigKeys.DFS_DATANODE_SYNC_BEHIND_WRITES_DEFAULT);
+ this.dropCacheBehindReads = conf.getBoolean(
+ DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_READS_KEY,
+ DFSConfigKeys.DFS_DATANODE_DROP_CACHE_BEHIND_READS_DEFAULT);
+
this.blockReportInterval = conf.getLong(DFS_BLOCKREPORT_INTERVAL_MSEC_KEY,
DFS_BLOCKREPORT_INTERVAL_MSEC_DEFAULT);
this.initialBlockReportDelay = conf.getLong(
@@ -2859,4 +2879,20 @@ public Long getBalancerBandwidth() {
(DataXceiverServer) this.dataXceiverServer.getRunnable();
return dxcs.balanceThrottler.getBandwidth();
}
+
+ long getReadaheadLength() {
+ return readaheadLength;
+ }
+
+ boolean shouldDropCacheBehindWrites() {
+ return dropCacheBehindWrites;
+ }
+
+ boolean shouldDropCacheBehindReads() {
+ return dropCacheBehindReads;
+ }
+
+ boolean shouldSyncBehindWrites() {
+ return syncBehindWrites;
+ }
}
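
The drop-behind logic in this diff advances a bookkeeping offset in fixed 1 MB steps and advises the kernel to evict bytes already streamed to the client. Below is an illustrative, standalone sketch of that bookkeeping only; the FadviseCall interface and the main() driver are hypothetical stand-ins for NativeIO.posixFadviseIfPossible and the packet-sending loop shown above.

import java.util.ArrayList;
import java.util.List;

/**
 * Illustrative sketch only: mirrors the interval-based drop-behind
 * bookkeeping of BlockSender.manageOsCache(). FadviseCall and main()
 * are hypothetical stand-ins, not HDFS APIs.
 */
public class DropBehindSketch {

    /** Hypothetical stand-in for the native fadvise(DONTNEED) call. */
    interface FadviseCall {
        void dontNeed(long offset, long length);
    }

    private static final long CACHE_DROP_INTERVAL_BYTES = 1024 * 1024; // 1 MB, as in the diff

    private long lastCacheDropOffset = 0;

    /** Called once per packet with the current read offset, like manageOsCache(). */
    void onPacketSent(long offset, FadviseCall fadvise) {
        long nextCacheDropOffset = lastCacheDropOffset + CACHE_DROP_INTERVAL_BYTES;
        if (offset >= nextCacheDropOffset) {
            long dropLength = offset - lastCacheDropOffset;
            if (dropLength >= 1024) {
                // Tell the OS we will not re-read the bytes already sent.
                fadvise.dontNeed(lastCacheDropOffset, dropLength);
            }
            lastCacheDropOffset += CACHE_DROP_INTERVAL_BYTES;
        }
    }

    public static void main(String[] args) {
        DropBehindSketch sketch = new DropBehindSketch();
        List<String> calls = new ArrayList<>();
        // Simulate streaming 3.5 MB in 64 KB packets.
        for (long offset = 0; offset <= 3_670_016L; offset += 65_536L) {
            sketch.onPacketSent(offset, (off, len) ->
                    calls.add("fadvise DONTNEED offset=" + off + " length=" + len));
        }
        calls.forEach(System.out::println); // one drop request per ~1 MB streamed
    }
}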
|
6188550a4817e1f8f0f024034d0f0b5f03b6ecc3
|
spring-framework
|
ServletRequestAttributes skips well-known immutable values when updating accessed session attributes. Issue: SPR-11738
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-web/src/main/java/org/springframework/web/context/request/ServletRequestAttributes.java b/spring-web/src/main/java/org/springframework/web/context/request/ServletRequestAttributes.java
index 8ad2fdb9979d..20a17c3fcebd 100644
--- a/spring-web/src/main/java/org/springframework/web/context/request/ServletRequestAttributes.java
+++ b/spring-web/src/main/java/org/springframework/web/context/request/ServletRequestAttributes.java
@@ -248,7 +248,7 @@ protected void updateAccessedSessionAttributes() {
String name = entry.getKey();
Object newValue = entry.getValue();
Object oldValue = this.session.getAttribute(name);
- if (oldValue == newValue) {
+ if (oldValue == newValue && !isImmutableSessionAttribute(name, newValue)) {
this.session.setAttribute(name, newValue);
}
}
@@ -260,6 +260,23 @@ protected void updateAccessedSessionAttributes() {
this.sessionAttributesToUpdate.clear();
}
+ /**
+ * Determine whether the given value is to be considered as an immutable session
+ * attribute, that is, doesn't have to be re-set via {@code session.setAttribute}
+ * since its value cannot meaningfully change internally.
+ * <p>The default implementation returns {@code true} for {@code String},
+ * {@code Character}, {@code Boolean} and {@code Number} values.
+ * @param name the name of the attribute
+ * @param value the corresponding value to check
+ * @return {@code true} if the value is to be considered as immutable for the
+ * purposes of session attribute management; {@code false} otherwise
+ * @see #updateAccessedSessionAttributes()
+ */
+ protected boolean isImmutableSessionAttribute(String name, Object value) {
+ return (value instanceof String || value instanceof Character ||
+ value instanceof Boolean || value instanceof Number);
+ }
+
/**
* Register the given callback as to be executed after session termination.
* <p>Note: The callback object should be serializable in order to survive
diff --git a/spring-web/src/test/java/org/springframework/web/context/request/ServletRequestAttributesTests.java b/spring-web/src/test/java/org/springframework/web/context/request/ServletRequestAttributesTests.java
index 2e5688857f82..226b05d3d1ef 100644
--- a/spring-web/src/test/java/org/springframework/web/context/request/ServletRequestAttributesTests.java
+++ b/spring-web/src/test/java/org/springframework/web/context/request/ServletRequestAttributesTests.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2013 the original author or authors.
+ * Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,10 +17,12 @@
package org.springframework.web.context.request;
import java.io.Serializable;
-
+import java.math.BigInteger;
import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpSession;
import org.junit.Test;
+
import org.springframework.mock.web.test.MockHttpServletRequest;
import org.springframework.mock.web.test.MockHttpSession;
@@ -39,23 +41,12 @@ public class ServletRequestAttributesTests {
private static final Serializable VALUE = new Serializable() {
};
+
@Test(expected = IllegalArgumentException.class)
public void ctorRejectsNullArg() throws Exception {
new ServletRequestAttributes(null);
}
- @Test
- public void updateAccessedAttributes() throws Exception {
- MockHttpSession session = new MockHttpSession();
- session.setAttribute(KEY, VALUE);
- MockHttpServletRequest request = new MockHttpServletRequest();
- request.setSession(session);
- ServletRequestAttributes attrs = new ServletRequestAttributes(request);
- Object value = attrs.getAttribute(KEY, RequestAttributes.SCOPE_SESSION);
- assertSame(VALUE, value);
- attrs.requestCompleted();
- }
-
@Test
public void setRequestScopedAttribute() throws Exception {
MockHttpServletRequest request = new MockHttpServletRequest();
@@ -162,4 +153,64 @@ public void removeSessionScopedAttributeDoesNotForceCreationOfSession() throws E
verify(request).getSession(false);
}
+ @Test
+ public void updateAccessedAttributes() throws Exception {
+ HttpServletRequest request = mock(HttpServletRequest.class);
+ HttpSession session = mock(HttpSession.class);
+ when(request.getSession(anyBoolean())).thenReturn(session);
+ when(session.getAttribute(KEY)).thenReturn(VALUE);
+
+ ServletRequestAttributes attrs = new ServletRequestAttributes(request);
+ assertSame(VALUE, attrs.getAttribute(KEY, RequestAttributes.SCOPE_SESSION));
+ attrs.requestCompleted();
+
+ verify(session, times(2)).getAttribute(KEY);
+ verify(session).setAttribute(KEY, VALUE);
+ verifyNoMoreInteractions(session);
+ }
+
+ @Test
+ public void skipImmutableString() {
+ doSkipImmutableValue("someString");
+ }
+
+ @Test
+ public void skipImmutableCharacter() {
+ doSkipImmutableValue(new Character('x'));
+ }
+
+ @Test
+ public void skipImmutableBoolean() {
+ doSkipImmutableValue(Boolean.TRUE);
+ }
+
+ @Test
+ public void skipImmutableInteger() {
+ doSkipImmutableValue(new Integer(1));
+ }
+
+ @Test
+ public void skipImmutableFloat() {
+ doSkipImmutableValue(new Float(1.1));
+ }
+
+ @Test
+ public void skipImmutableBigInteger() {
+ doSkipImmutableValue(new BigInteger("1"));
+ }
+
+ private void doSkipImmutableValue(Object immutableValue) {
+ HttpServletRequest request = mock(HttpServletRequest.class);
+ HttpSession session = mock(HttpSession.class);
+ when(request.getSession(anyBoolean())).thenReturn(session);
+ when(session.getAttribute(KEY)).thenReturn(immutableValue);
+
+ ServletRequestAttributes attrs = new ServletRequestAttributes(request);
+ attrs.getAttribute(KEY, RequestAttributes.SCOPE_SESSION);
+ attrs.requestCompleted();
+
+ verify(session, times(2)).getAttribute(KEY);
+ verifyNoMoreInteractions(session);
+ }
+
}
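
Because isImmutableSessionAttribute is protected, an application can widen the set of value types that are skipped on request completion. A minimal sketch follows; the LocalDate addition is purely an example and not part of the commit.

import javax.servlet.http.HttpServletRequest;
import java.time.LocalDate;

import org.springframework.web.context.request.ServletRequestAttributes;

/**
 * Illustrative sketch only: extends the hook introduced above so that one
 * additional value type is treated as immutable. LocalDate is an arbitrary
 * example chosen for the sketch.
 */
public class LenientRequestAttributes extends ServletRequestAttributes {

    public LenientRequestAttributes(HttpServletRequest request) {
        super(request);
    }

    @Override
    protected boolean isImmutableSessionAttribute(String name, Object value) {
        // Keep the default String/Character/Boolean/Number handling and
        // additionally skip re-setting LocalDate values.
        return super.isImmutableSessionAttribute(name, value) || value instanceof LocalDate;
    }
}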
|
1f47d355234777b707191b9c8e813c0000ecf212
|
hadoop
|
YARN-2059. Added admin ACLs support to Timeline Server. Contributed by Zhijie Shen. svn merge --ignore-ancestry -c 1597207 ../../trunk/ git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1597208 13f79535-47bb-0310-9956-ffa450edef68
|
a
|
https://github.com/apache/hadoop
|
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index e34b930003f51..c74e777d400fc 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -87,6 +87,9 @@ Release 2.5.0 - UNRELEASED
YARN-2012. Fair Scheduler: allow default queue placement rule to take an
arbitrary queue (Ashwin Shankar via Sandy Ryza)
+ YARN-2059. Added admin ACLs support to Timeline Server. (Zhijie Shen via
+ vinodkv)
+
OPTIMIZATIONS
BUG FIXES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineACLsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineACLsManager.java
index 5bc8705222f88..8009b39c94f72 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineACLsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineACLsManager.java
@@ -27,8 +27,8 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.security.AdminACLsManager;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.EntityIdentifier;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore.SystemFilter;
@@ -42,11 +42,10 @@ public class TimelineACLsManager {
private static final Log LOG = LogFactory.getLog(TimelineACLsManager.class);
- private boolean aclsEnabled;
+ private AdminACLsManager adminAclsManager;
public TimelineACLsManager(Configuration conf) {
- aclsEnabled = conf.getBoolean(YarnConfiguration.YARN_ACL_ENABLE,
- YarnConfiguration.DEFAULT_YARN_ACL_ENABLE);
+ this.adminAclsManager = new AdminACLsManager(conf);
}
public boolean checkAccess(UserGroupInformation callerUGI,
@@ -57,7 +56,7 @@ public boolean checkAccess(UserGroupInformation callerUGI,
+ new EntityIdentifier(entity.getEntityId(), entity.getEntityType()));
}
- if (!aclsEnabled) {
+ if (!adminAclsManager.areACLsEnabled()) {
return true;
}
@@ -70,10 +69,12 @@ public boolean checkAccess(UserGroupInformation callerUGI,
+ " is corrupted.");
}
String owner = values.iterator().next().toString();
- // TODO: Currently we just check the user is the timeline entity owner. In
- // the future, we need to check whether the user is admin or is in the
+ // TODO: Currently we just check the user is the admin or the timeline
+ // entity owner. In the future, we need to check whether the user is in the
// allowed user/group list
- if (callerUGI != null && callerUGI.getShortUserName().equals(owner)) {
+ if (callerUGI != null
+ && (adminAclsManager.isAdmin(callerUGI) ||
+ callerUGI.getShortUserName().equals(owner))) {
return true;
}
return false;
@@ -81,8 +82,11 @@ public boolean checkAccess(UserGroupInformation callerUGI,
@Private
@VisibleForTesting
- public void setACLsEnabled(boolean aclsEnabled) {
- this.aclsEnabled = aclsEnabled;
+ public AdminACLsManager
+ setAdminACLsManager(AdminACLsManager adminAclsManager) {
+ AdminACLsManager oldAdminACLsManager = this.adminAclsManager;
+ this.adminAclsManager = adminAclsManager;
+ return oldAdminACLsManager;
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
index 6041779b13a94..5d749fa090653 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
@@ -346,8 +346,9 @@ public TimelinePutResponse postEntities(
new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
// check if there is existing entity
+ TimelineEntity existingEntity = null;
try {
- TimelineEntity existingEntity =
+ existingEntity =
store.getEntity(entityID.getId(), entityID.getType(),
EnumSet.of(Field.PRIMARY_FILTERS));
if (existingEntity != null
@@ -369,10 +370,14 @@ public TimelinePutResponse postEntities(
continue;
}
- // inject owner information for the access check
+ // inject owner information for the access check if this is the first
+ // time to post the entity, in case it's the admin who is updating
+ // the timeline data.
try {
- injectOwnerInfo(entity,
- callerUGI == null ? "" : callerUGI.getShortUserName());
+ if (existingEntity == null) {
+ injectOwnerInfo(entity,
+ callerUGI == null ? "" : callerUGI.getShortUserName());
+ }
} catch (YarnException e) {
// Skip the entity which messes up the primary filter and record the
// error
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TestTimelineACLsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TestTimelineACLsManager.java
index 2c536681aed25..39102b43badfa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TestTimelineACLsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TestTimelineACLsManager.java
@@ -49,6 +49,7 @@ public void testYarnACLsNotEnabled() throws Exception {
public void testYarnACLsEnabled() throws Exception {
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
+ conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin");
TimelineACLsManager timelineACLsManager =
new TimelineACLsManager(conf);
TimelineEntity entity = new TimelineEntity();
@@ -63,12 +64,17 @@ public void testYarnACLsEnabled() throws Exception {
"Other shouldn't be allowed to access",
timelineACLsManager.checkAccess(
UserGroupInformation.createRemoteUser("other"), entity));
+ Assert.assertTrue(
+ "Admin should be allowed to access",
+ timelineACLsManager.checkAccess(
+ UserGroupInformation.createRemoteUser("admin"), entity));
}
@Test
public void testCorruptedOwnerInfo() throws Exception {
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
+ conf.set(YarnConfiguration.YARN_ADMIN_ACL, "owner");
TimelineACLsManager timelineACLsManager =
new TimelineACLsManager(conf);
TimelineEntity entity = new TimelineEntity();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
index 9b0ae3761d26a..7e3e409940386 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
@@ -40,6 +40,7 @@
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.security.AdminACLsManager;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TestMemoryTimelineStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineACLsManager;
@@ -64,6 +65,7 @@ public class TestTimelineWebServices extends JerseyTest {
private static TimelineStore store;
private static TimelineACLsManager timelineACLsManager;
+ private static AdminACLsManager adminACLsManager;
private static String remoteUser;
private long beforeTime;
@@ -83,6 +85,9 @@ protected void configureServlets() {
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false);
timelineACLsManager = new TimelineACLsManager(conf);
+ conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
+ conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin");
+ adminACLsManager = new AdminACLsManager(conf);
bind(TimelineACLsManager.class).toInstance(timelineACLsManager);
serve("/*").with(GuiceContainer.class);
filter("/*").through(TestFilter.class);
@@ -387,7 +392,8 @@ public void testPostEntities() throws Exception {
@Test
public void testPostEntitiesWithYarnACLsEnabled() throws Exception {
- timelineACLsManager.setACLsEnabled(true);
+ AdminACLsManager oldAdminACLsManager =
+ timelineACLsManager.setAdminACLsManager(adminACLsManager);
remoteUser = "tester";
try {
TimelineEntities entities = new TimelineEntities();
@@ -419,14 +425,15 @@ public void testPostEntitiesWithYarnACLsEnabled() throws Exception {
Assert.assertEquals(TimelinePutResponse.TimelinePutError.ACCESS_DENIED,
putResponse.getErrors().get(0).getErrorCode());
} finally {
- timelineACLsManager.setACLsEnabled(false);
+ timelineACLsManager.setAdminACLsManager(oldAdminACLsManager);
remoteUser = null;
}
}
@Test
public void testGetEntityWithYarnACLsEnabled() throws Exception {
- timelineACLsManager.setACLsEnabled(true);
+ AdminACLsManager oldAdminACLsManager =
+ timelineACLsManager.setAdminACLsManager(adminACLsManager);
remoteUser = "tester";
try {
TimelineEntities entities = new TimelineEntities();
@@ -481,14 +488,15 @@ public void testGetEntityWithYarnACLsEnabled() throws Exception {
assertEquals(ClientResponse.Status.NOT_FOUND,
response.getClientResponseStatus());
} finally {
- timelineACLsManager.setACLsEnabled(false);
+ timelineACLsManager.setAdminACLsManager(oldAdminACLsManager);
remoteUser = null;
}
}
@Test
public void testGetEntitiesWithYarnACLsEnabled() {
- timelineACLsManager.setACLsEnabled(true);
+ AdminACLsManager oldAdminACLsManager =
+ timelineACLsManager.setAdminACLsManager(adminACLsManager);
remoteUser = "tester";
try {
TimelineEntities entities = new TimelineEntities();
@@ -526,14 +534,15 @@ public void testGetEntitiesWithYarnACLsEnabled() {
assertEquals("test type 4", entities.getEntities().get(0).getEntityType());
assertEquals("test id 5", entities.getEntities().get(0).getEntityId());
} finally {
- timelineACLsManager.setACLsEnabled(false);
+ timelineACLsManager.setAdminACLsManager(oldAdminACLsManager);
remoteUser = null;
}
}
@Test
public void testGetEventsWithYarnACLsEnabled() {
- timelineACLsManager.setACLsEnabled(true);
+ AdminACLsManager oldAdminACLsManager =
+ timelineACLsManager.setAdminACLsManager(adminACLsManager);
remoteUser = "tester";
try {
TimelineEntities entities = new TimelineEntities();
@@ -579,7 +588,7 @@ public void testGetEventsWithYarnACLsEnabled() {
assertEquals(1, events.getAllEvents().size());
assertEquals("test id 6", events.getAllEvents().get(0).getEntityId());
} finally {
- timelineACLsManager.setACLsEnabled(false);
+ timelineACLsManager.setAdminACLsManager(oldAdminACLsManager);
remoteUser = null;
}
}
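
For reference, the admin check added in this commit reads the same two YARN settings the tests above configure. A minimal sketch of setting them programmatically, assuming nothing beyond the constants used in the diff:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

/**
 * Illustrative sketch only: the configuration the timeline server's
 * admin check reads, set up the same way as in the tests above.
 */
public class TimelineAclConfigSketch {
    public static void main(String[] args) {
        Configuration conf = new YarnConfiguration();
        // Turn ACL enforcement on; without this every access is allowed.
        conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
        // Users/groups listed here pass the AdminACLsManager.isAdmin() check.
        conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin");
        System.out.println(conf.get(YarnConfiguration.YARN_ADMIN_ACL));
    }
}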
|
3980e032581824d7241748c7ec56a916fdce6261
|
orientdb
|
Improved memory usage and optimized general speed.
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/OMemoryWatchDog.java b/core/src/main/java/com/orientechnologies/orient/core/OMemoryWatchDog.java
index 4aeb1c5992c..20972ad28ee 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/OMemoryWatchDog.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/OMemoryWatchDog.java
@@ -73,12 +73,14 @@ public OMemoryWatchDog(final float iThreshold) {
public void handleNotification(Notification n, Object hb) {
if (n.getType().equals(MemoryNotificationInfo.MEMORY_THRESHOLD_EXCEEDED)) {
alertTimes++;
- final long maxMemory = tenuredGenPool.getUsage().getMax();
+ long maxMemory = tenuredGenPool.getUsage().getMax();
long usedMemory = tenuredGenPool.getUsage().getUsed();
long freeMemory = maxMemory - usedMemory;
- OLogManager.instance().debug(this, "Low memory %s%% (used %s of %s), calling listeners to free memory in soft way...",
- freeMemory * 100 / maxMemory, OFileUtils.getSizeAsString(usedMemory), OFileUtils.getSizeAsString(maxMemory));
+ OLogManager.instance().debug(this,
+ "Free memory is low %s %s%% (used %s of %s), calling listeners to free memory in SOFT way...",
+ OFileUtils.getSizeAsString(freeMemory), freeMemory * 100 / maxMemory, OFileUtils.getSizeAsString(usedMemory),
+ OFileUtils.getSizeAsString(maxMemory));
final long timer = OProfiler.getInstance().startChrono();
@@ -90,30 +92,44 @@ public void handleNotification(Notification n, Object hb) {
}
}
- System.gc();
- try {
- Thread.sleep(400);
- } catch (InterruptedException e) {
- }
+ long threshold;
+ do {
+ // INVOKE GC AND WAIT A BIT
+ System.gc();
+ try {
+ Thread.sleep(400);
+ } catch (InterruptedException e) {
+ }
- freeMemory = Runtime.getRuntime().freeMemory();
- usedMemory = maxMemory - freeMemory;
- final long threshold = (long) (maxMemory * (1 - OGlobalConfiguration.MEMORY_OPTIMIZE_THRESHOLD.getValueAsFloat()));
-
- if (freeMemory < threshold) {
- OLogManager.instance().info(this,
- "Low memory %s%% (used %s of %s) while the threshold is %s, calling listeners to free memory in hard way...",
- usedMemory * 100 / maxMemory, OFileUtils.getSizeAsString(usedMemory), OFileUtils.getSizeAsString(maxMemory),
- OFileUtils.getSizeAsString(threshold));
-
- for (Listener listener : listeners) {
- try {
- listener.memoryUsageCritical(TYPE.JVM, usedMemory, maxMemory);
- } catch (Exception e) {
- e.printStackTrace();
+ // RECHECK IF MEMORY IS OK NOW
+ maxMemory = tenuredGenPool.getUsage().getMax();
+ usedMemory = tenuredGenPool.getUsage().getUsed();
+ freeMemory = maxMemory - usedMemory;
+
+ threshold = (long) (maxMemory * (1 - OGlobalConfiguration.MEMORY_OPTIMIZE_THRESHOLD.getValueAsFloat()));
+
+ OLogManager.instance().debug(this, "Free memory now is %s %s%% (used %s of %s) with threshold for HARD clean is %s",
+ OFileUtils.getSizeAsString(freeMemory), freeMemory * 100 / maxMemory, OFileUtils.getSizeAsString(usedMemory),
+ OFileUtils.getSizeAsString(maxMemory), OFileUtils.getSizeAsString(threshold));
+
+ if (freeMemory < threshold) {
+ OLogManager
+ .instance()
+ .debug(
+ this,
+ "Free memory is low %s %s%% (used %s of %s) while the threshold is %s, calling listeners to free memory in HARD way...",
+ OFileUtils.getSizeAsString(freeMemory), freeMemory * 100 / maxMemory, OFileUtils.getSizeAsString(usedMemory),
+ OFileUtils.getSizeAsString(maxMemory), OFileUtils.getSizeAsString(threshold));
+
+ for (Listener listener : listeners) {
+ try {
+ listener.memoryUsageCritical(TYPE.JVM, usedMemory, maxMemory);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
}
}
- }
+ } while (freeMemory < threshold);
OProfiler.getInstance().stopChrono("OMemoryWatchDog.freeResources", timer);
}
diff --git a/core/src/main/java/com/orientechnologies/orient/core/cache/OAbstractRecordCache.java b/core/src/main/java/com/orientechnologies/orient/core/cache/OAbstractRecordCache.java
index 962c5f75f6d..ec68d81c627 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/cache/OAbstractRecordCache.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/cache/OAbstractRecordCache.java
@@ -156,7 +156,7 @@ public void memoryUsageLow(TYPE iType, final long usedMemory, final long maxMemo
// UNACTIVE
return;
- final int threshold = (int) (oldSize * 0.5f);
+ final int threshold = (int) (oldSize * 0.9f);
entries.removeEldestItems(threshold);
diff --git a/core/src/main/java/com/orientechnologies/orient/core/config/OGlobalConfiguration.java b/core/src/main/java/com/orientechnologies/orient/core/config/OGlobalConfiguration.java
index 531880f0f17..4de9330e56f 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/config/OGlobalConfiguration.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/config/OGlobalConfiguration.java
@@ -313,7 +313,7 @@ private static void autoConfig() {
// WINDOWS
// AVOID TO USE MMAP, SINCE COULD BE BUGGY
- //FILE_MMAP_STRATEGY.setValue(3);
+ FILE_MMAP_STRATEGY.setValue(3);
}
if (System.getProperty("os.arch").indexOf("64") > -1) {
diff --git a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
index 0fefb774c0f..be054f60453 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
@@ -84,7 +84,7 @@ public void memoryUsageLow(final TYPE iType, final long usedMemory, final long m
map.setMaxUpdatesBeforeSave(maxUpdates);
- optimize();
+ optimize(false);
} finally {
releaseExclusiveLock();
}
@@ -97,14 +97,14 @@ public void memoryUsageCritical(final TYPE iType, final long usedMemory, final l
if (map != null) {
acquireExclusiveLock();
try {
- // REDUCE SOME PARAMETERS
+ // REDUCE OF 10% LAZY UPDATES
int maxUpdates = map.getMaxUpdatesBeforeSave();
if (maxUpdates > 10)
- maxUpdates *= 0.5;
+ maxUpdates *= 0.50;
map.setMaxUpdatesBeforeSave(maxUpdates);
- optimize();
+ optimize(true);
} finally {
releaseExclusiveLock();
}
@@ -112,13 +112,15 @@ public void memoryUsageCritical(final TYPE iType, final long usedMemory, final l
}
}
- private void optimize() {
+ private void optimize(final boolean iHardMode) {
OLogManager.instance().debug(this, "Forcing optimization of Index %s (%d items). Found %d entries in memory...", name,
map.size(), map.getInMemoryEntries());
+ if (iHardMode)
+ map.freeInMemoryResources();
map.optimize(true);
- OLogManager.instance().debug(this, "Completed! Now %d entries resides in memory", map.getInMemoryEntries());
+ OLogManager.instance().debug(this, "Completed! Now %d entries reside in memory", map.getInMemoryEntries());
}
};
diff --git a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OUser.java b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OUser.java
index 840f4615673..02bca7f54a3 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OUser.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/metadata/security/OUser.java
@@ -80,9 +80,10 @@ public void fromStream(final ODocument iSource) {
roles = new HashSet<ORole>();
final Set<ODocument> loadedRoles = iSource.field("roles");
- for (ODocument d : loadedRoles) {
- roles.add(document.getDatabase().getMetadata().getSecurity().getRole((String) d.field("name")));
- }
+ if (loadedRoles != null)
+ for (ODocument d : loadedRoles) {
+ roles.add(document.getDatabase().getMetadata().getSecurity().getRole((String) d.field("name")));
+ }
}
/**
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryPersistent.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryPersistent.java
index d4beb70cede..53c18eb423f 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryPersistent.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreeEntryPersistent.java
@@ -221,7 +221,7 @@ protected int disconnect(final boolean iForceDirty, final int iLevel) {
// REMOVE ME FROM THE CACHE
if (pTree.cache.remove(record.getIdentity()) == null)
- OLogManager.instance().warn(this, "Can't find current node into the cache. Is the cache invalid?");
+ OLogManager.instance().debug(this, "Can't find current node into the cache. Is the cache invalid?");
int totalDisconnected = 1;
diff --git a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java
index e4c934c8eef..7b1d24da0dc 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java
@@ -69,8 +69,6 @@ public abstract class OMVRBTreePersistent<K, V> extends OMVRBTree<K, V> implemen
protected float optimizeEntryPointsFactor;
protected volatile List<OMVRBTreeEntryPersistent<K, V>> entryPoints = new ArrayList<OMVRBTreeEntryPersistent<K, V>>(
entryPointsSize);
- protected List<OMVRBTreeEntryPersistent<K, V>> newEntryPoints = new ArrayList<OMVRBTreeEntryPersistent<K, V>>(
- entryPointsSize);
protected Map<ORID, OMVRBTreeEntryPersistent<K, V>> cache = new HashMap<ORID, OMVRBTreeEntryPersistent<K, V>>();
private final OMemoryOutputStream entryRecordBuffer;
@@ -156,6 +154,13 @@ public void unload() {
}
}
+ /**
+ * Frees all the in memory objects. It's called under hard memory pressure.
+ */
+ public void freeInMemoryResources() {
+ entryPoints.clear();
+ }
+
/**
* Optimize the tree memory consumption by keeping part of nodes as entry points and clearing all the rest.
*/
@@ -222,7 +227,7 @@ public void optimize(final boolean iForce) {
else
distance = nodes / entryPointsSize + 1;
- newEntryPoints.clear();
+ final List<OMVRBTreeEntryPersistent<K, V>> newEntryPoints = new ArrayList<OMVRBTreeEntryPersistent<K, V>>(entryPointsSize + 1);
OLogManager.instance().debug(this, "Compacting nodes with distance = %d", distance);
@@ -287,9 +292,7 @@ public void optimize(final boolean iForce) {
// SWAP TMP AND REAL ENTRY POINT COLLECTIONS
entryPoints.clear();
- final List<OMVRBTreeEntryPersistent<K, V>> a = entryPoints;
entryPoints = newEntryPoints;
- newEntryPoints = a;
if (debug) {
System.out.printf("\nEntrypoints (%d): ", entryPoints.size());
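
The watchdog in this diff reacts to MEMORY_THRESHOLD_EXCEEDED notifications from the tenured pool and, after the soft pass, loops on System.gc() until free memory climbs back over the configured threshold. Below is a hedged sketch of how such a low-memory listener is wired up with the standard java.lang.management API, independent of OrientDB's classes; the 80% threshold and the println body are arbitrary examples.

import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryNotificationInfo;
import java.lang.management.MemoryPoolMXBean;
import java.lang.management.MemoryType;

import javax.management.Notification;
import javax.management.NotificationEmitter;
import javax.management.NotificationListener;

/**
 * Illustrative sketch only: registers a low-memory listener the way a
 * watchdog like the one above would, using the standard JMX memory API.
 */
public class LowMemoryWatchSketch {

    public static void main(String[] args) {
        // Ask every heap pool that supports it to notify us at 80% usage.
        for (MemoryPoolMXBean pool : ManagementFactory.getMemoryPoolMXBeans()) {
            if (pool.getType() == MemoryType.HEAP && pool.isUsageThresholdSupported()) {
                long max = pool.getUsage().getMax();
                if (max > 0) {
                    pool.setUsageThreshold((long) (max * 0.8));
                }
            }
        }

        MemoryMXBean memoryBean = ManagementFactory.getMemoryMXBean();
        NotificationListener listener = new NotificationListener() {
            @Override
            public void handleNotification(Notification notification, Object handback) {
                if (MemoryNotificationInfo.MEMORY_THRESHOLD_EXCEEDED.equals(notification.getType())) {
                    // A real watchdog would now ask caches/indexes to shrink,
                    // then re-check usage and escalate to a "hard" cleanup.
                    System.out.println("Heap usage crossed the configured threshold");
                }
            }
        };
        ((NotificationEmitter) memoryBean).addNotificationListener(listener, null, null);
    }
}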
|
66ae626f91e0b2bbfcf9b9059cb06b07883d9b0b
|
spring-framework
|
Only register Date converters with global format. Change JodaTimeFormatterRegistrar and DateFormatterRegistrar to only register converters for the Date and Calendar types when a global format has been defined. This means that the ObjectToObject converter will handle String->Date conversion using the deprecated Date(String) constructor (as was the case with Spring 3.1). Issue: SPR-10105
|
c
|
https://github.com/spring-projects/spring-framework
|
diff --git a/spring-context/src/main/java/org/springframework/format/datetime/DateFormatterRegistrar.java b/spring-context/src/main/java/org/springframework/format/datetime/DateFormatterRegistrar.java
index 5e45fedee3bd..b74b53262b5b 100644
--- a/spring-context/src/main/java/org/springframework/format/datetime/DateFormatterRegistrar.java
+++ b/spring-context/src/main/java/org/springframework/format/datetime/DateFormatterRegistrar.java
@@ -40,20 +40,24 @@
public class DateFormatterRegistrar implements FormatterRegistrar {
- private DateFormatter dateFormatter = new DateFormatter();
+ private DateFormatter dateFormatter;
public void registerFormatters(FormatterRegistry registry) {
addDateConverters(registry);
- registry.addFormatter(this.dateFormatter);
- registry.addFormatterForFieldType(Calendar.class, this.dateFormatter);
registry.addFormatterForFieldAnnotation(new DateTimeFormatAnnotationFormatterFactory());
+
+ // In order to retain back compatibility we only register Date/Calendar
+ // types when a user defined formatter is specified (see SPR-10105)
+ if(this.dateFormatter != null) {
+ registry.addFormatter(this.dateFormatter);
+ registry.addFormatterForFieldType(Calendar.class, this.dateFormatter);
+ }
}
/**
- * Set the date formatter to register. If not specified the default {@link DateFormatter}
- * will be used. This method can be used if additional formatter configuration is
- * required.
+ * Set the date formatter to register. If not specified no formatter is registered.
+ * This method can be used if global formatter configuration is required.
* @param dateFormatter the date formatter
*/
public void setFormatter(DateFormatter dateFormatter) {
diff --git a/spring-context/src/main/java/org/springframework/format/datetime/joda/JodaTimeFormatterRegistrar.java b/spring-context/src/main/java/org/springframework/format/datetime/joda/JodaTimeFormatterRegistrar.java
index 2035b68b8521..7ad1a4e62e64 100644
--- a/spring-context/src/main/java/org/springframework/format/datetime/joda/JodaTimeFormatterRegistrar.java
+++ b/spring-context/src/main/java/org/springframework/format/datetime/joda/JodaTimeFormatterRegistrar.java
@@ -174,7 +174,16 @@ public void registerFormatters(FormatterRegistry registry) {
addFormatterForFields(registry,
new ReadableInstantPrinter(dateTimeFormatter),
new DateTimeParser(dateTimeFormatter),
- ReadableInstant.class, Date.class, Calendar.class);
+ ReadableInstant.class);
+
+ // In order to retain back compatibility we only register Date/Calendar
+ // types when a user defined formatter is specified (see SPR-10105)
+ if(this.formatters.containsKey(Type.DATE_TIME)) {
+ addFormatterForFields(registry,
+ new ReadableInstantPrinter(dateTimeFormatter),
+ new DateTimeParser(dateTimeFormatter),
+ Date.class, Calendar.class);
+ }
registry.addFormatterForFieldAnnotation(
new JodaDateTimeFormatAnnotationFormatterFactory());
diff --git a/spring-context/src/test/java/org/springframework/format/datetime/DateFormattingTests.java b/spring-context/src/test/java/org/springframework/format/datetime/DateFormattingTests.java
index 5698ed06518e..2803f0dc9f9d 100644
--- a/spring-context/src/test/java/org/springframework/format/datetime/DateFormattingTests.java
+++ b/spring-context/src/test/java/org/springframework/format/datetime/DateFormattingTests.java
@@ -16,7 +16,10 @@
package org.springframework.format.datetime;
+import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertThat;
import java.util.ArrayList;
import java.util.Calendar;
@@ -50,9 +53,12 @@ public class DateFormattingTests {
@Before
public void setUp() {
- DefaultConversionService.addDefaultConverters(conversionService);
-
DateFormatterRegistrar registrar = new DateFormatterRegistrar();
+ setUp(registrar);
+ }
+
+ private void setUp(DateFormatterRegistrar registrar) {
+ DefaultConversionService.addDefaultConverters(conversionService);
registrar.registerFormatters(conversionService);
SimpleDateBean bean = new SimpleDateBean();
@@ -187,13 +193,48 @@ public void testBindNestedDateAnnotated() {
}
@Test
- public void dateToString() throws Exception {
+ public void dateToStringWithoutGlobalFormat() throws Exception {
+ Date date = new Date();
+ Object actual = this.conversionService.convert(date, TypeDescriptor.valueOf(Date.class), TypeDescriptor.valueOf(String.class));
+ String expected = date.toString();
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void dateToStringWithGlobalFormat() throws Exception {
+ DateFormatterRegistrar registrar = new DateFormatterRegistrar();
+ registrar.setFormatter(new DateFormatter());
+ setUp(registrar);
Date date = new Date();
Object actual = this.conversionService.convert(date, TypeDescriptor.valueOf(Date.class), TypeDescriptor.valueOf(String.class));
String expected = new DateFormatter().print(date, Locale.US);
assertEquals(expected, actual);
}
+ @Test
+ @SuppressWarnings("deprecation")
+ public void stringToDateWithoutGlobalFormat() throws Exception {
+ // SPR-10105
+ String string = "Sat, 12 Aug 1995 13:30:00 GM";
+ Date date = this.conversionService.convert(string, Date.class);
+ assertThat(date, equalTo(new Date(string)));
+ }
+
+ @Test
+ public void stringToDateWithGlobalFormat() throws Exception {
+ // SPR-10105
+ DateFormatterRegistrar registrar = new DateFormatterRegistrar();
+ DateFormatter dateFormatter = new DateFormatter();
+ dateFormatter.setIso(ISO.DATE_TIME);
+ registrar.setFormatter(dateFormatter);
+ setUp(registrar);
+ // This is a format that cannot be parsed by new Date(String)
+ String string = "2009-06-01T14:23:05.003+0000";
+ Date date = this.conversionService.convert(string, Date.class);
+ assertNotNull(date);
+ }
+
+
@SuppressWarnings("unused")
private static class SimpleDateBean {
diff --git a/spring-context/src/test/java/org/springframework/format/datetime/joda/JodaTimeFormattingTests.java b/spring-context/src/test/java/org/springframework/format/datetime/joda/JodaTimeFormattingTests.java
index 237df0509f3f..c2aaf8d36a3c 100644
--- a/spring-context/src/test/java/org/springframework/format/datetime/joda/JodaTimeFormattingTests.java
+++ b/spring-context/src/test/java/org/springframework/format/datetime/joda/JodaTimeFormattingTests.java
@@ -16,6 +16,11 @@
package org.springframework.format.datetime.joda;
+import static org.hamcrest.Matchers.equalTo;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertThat;
+
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
@@ -32,7 +37,6 @@
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
-
import org.springframework.beans.MutablePropertyValues;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.core.convert.TypeDescriptor;
@@ -42,8 +46,6 @@
import org.springframework.format.support.FormattingConversionService;
import org.springframework.validation.DataBinder;
-import static org.junit.Assert.*;
-
/**
* @author Keith Donald
* @author Juergen Hoeller
@@ -459,13 +461,40 @@ public void testBindMutableDateTimeAnnotated() {
}
@Test
- public void dateToString() throws Exception {
+ public void dateToStringWithFormat() throws Exception {
+ JodaTimeFormatterRegistrar registrar = new JodaTimeFormatterRegistrar();
+ registrar.setDateTimeFormatter(org.joda.time.format.DateTimeFormat.shortDateTime());
+ setUp(registrar);
Date date = new Date();
Object actual = this.conversionService.convert(date, TypeDescriptor.valueOf(Date.class), TypeDescriptor.valueOf(String.class));
String expected = JodaTimeContextHolder.getFormatter(org.joda.time.format.DateTimeFormat.shortDateTime(), Locale.US).print(new DateTime(date));
assertEquals(expected, actual);
}
+ @Test
+ @SuppressWarnings("deprecation")
+ public void stringToDateWithoutGlobalFormat() throws Exception {
+ // SPR-10105
+ String string = "Sat, 12 Aug 1995 13:30:00 GM";
+ Date date = this.conversionService.convert(string, Date.class);
+ assertThat(date, equalTo(new Date(string)));
+ }
+
+ @Test
+ public void stringToDateWithGlobalFormat() throws Exception {
+ // SPR-10105
+ JodaTimeFormatterRegistrar registrar = new JodaTimeFormatterRegistrar();
+ DateTimeFormatterFactory factory = new DateTimeFormatterFactory();
+ factory.setIso(ISO.DATE_TIME);
+ registrar.setDateTimeFormatter(factory.createDateTimeFormatter());
+ setUp(registrar);
+ // This is a format that cannot be parsed by new Date(String)
+ String string = "2009-10-31T07:00:00.000-05:00";
+ Date date = this.conversionService.convert(string, Date.class);
+ assertNotNull(date);
+ }
+
+
@SuppressWarnings("unused")
private static class JodaTimeBean {
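
As the commit message notes, the Date/Calendar converters are now only registered when a global format is supplied. A minimal sketch of opting in, mirroring the stringToDateWithGlobalFormat test above and assuming only the calls shown in this diff:

import java.util.Date;

import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.format.annotation.DateTimeFormat.ISO;
import org.springframework.format.datetime.DateFormatter;
import org.springframework.format.datetime.DateFormatterRegistrar;
import org.springframework.format.support.FormattingConversionService;

/**
 * Illustrative sketch only: supplies a global DateFormatter so the
 * registrar wires up the Date/Calendar converters.
 */
public class GlobalDateFormatSketch {
    public static void main(String[] args) {
        FormattingConversionService conversionService = new FormattingConversionService();
        DefaultConversionService.addDefaultConverters(conversionService);

        DateFormatterRegistrar registrar = new DateFormatterRegistrar();
        DateFormatter dateFormatter = new DateFormatter();
        dateFormatter.setIso(ISO.DATE_TIME);      // global ISO date-time format
        registrar.setFormatter(dateFormatter);    // without this, Date/Calendar are skipped
        registrar.registerFormatters(conversionService);

        // A format that new Date(String) cannot parse, taken from the test above.
        Date date = conversionService.convert("2009-06-01T14:23:05.003+0000", Date.class);
        System.out.println(date);
    }
}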
|
ca849f196990eec942468efaef3719f829c265eb
|
orientdb
|
Improved management of distributed cluster nodes.
|
p
|
https://github.com/orientechnologies/orientdb
|
diff --git a/client/src/main/java/com/orientechnologies/orient/client/remote/OServerAdmin.java b/client/src/main/java/com/orientechnologies/orient/client/remote/OServerAdmin.java
index 6ad17e70a96..80b8a0fa780 100644
--- a/client/src/main/java/com/orientechnologies/orient/client/remote/OServerAdmin.java
+++ b/client/src/main/java/com/orientechnologies/orient/client/remote/OServerAdmin.java
@@ -117,7 +117,7 @@ public OServerAdmin deleteDatabase() throws IOException {
}
public OServerAdmin shareDatabase(final String iDatabaseName, final String iDatabaseUserName, final String iDatabaseUserPassword,
- final String iRemoteName) throws IOException {
+ final String iRemoteName, final String iMode) throws IOException {
try {
storage.writeCommand(OChannelDistributedProtocol.REQUEST_DISTRIBUTED_DB_SHARE_SENDER);
@@ -125,11 +125,13 @@ public OServerAdmin shareDatabase(final String iDatabaseName, final String iData
storage.getNetwork().writeString(iDatabaseUserName);
storage.getNetwork().writeString(iDatabaseUserPassword);
storage.getNetwork().writeString(iRemoteName);
+ storage.getNetwork().writeString(iMode);
storage.getNetwork().flush();
storage.getNetwork().readStatus();
- OLogManager.instance().debug(this, "Database %s has been shared with the server %s.", iDatabaseName, iRemoteName);
+ OLogManager.instance().debug(this, "Database '%s' has been shared in mode '%s' with the server '%s'", iDatabaseName, iMode,
+ iRemoteName);
} catch (Exception e) {
OLogManager.instance().exception("Can't share the database: " + iDatabaseName, e, OStorageException.class);
diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandler.java b/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandler.java
index 6a1603e0075..e3e174cf388 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandler.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandler.java
@@ -40,17 +40,20 @@ public interface OServerHandler extends OService {
/**
* Callback invoked before a client request is processed.
*/
- public void onBeforeClientRequest(OClientConnection iConnection, byte iRequestType);
+ public void onBeforeClientRequest(OClientConnection iConnection, Object iRequestType);
/**
* Callback invoked after a client request is processed.
*/
- public void onAfterClientRequest(OClientConnection iConnection, byte iRequestType);
+ public void onAfterClientRequest(OClientConnection iConnection, Object iRequestType);
/**
* Callback invoked when a client connection has errors.
+ *
+ * @param iThrowable
+ * Throwable instance received
*/
- public void onClientError(OClientConnection iConnection);
+ public void onClientError(OClientConnection iConnection, Throwable iThrowable);
/**
* Configures the handler. Called at startup.
diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandlerAbstract.java b/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandlerAbstract.java
index 39a5c4d457d..a146ac509fa 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandlerAbstract.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/handler/OServerHandlerAbstract.java
@@ -25,12 +25,12 @@ public void onClientConnection(final OClientConnection iConnection) {
public void onClientDisconnection(final OClientConnection iConnection) {
}
- public void onBeforeClientRequest(final OClientConnection iConnection, final byte iRequestType) {
+ public void onBeforeClientRequest(final OClientConnection iConnection, final Object iRequestType) {
}
- public void onAfterClientRequest(final OClientConnection iConnection, final byte iRequestType) {
+ public void onAfterClientRequest(final OClientConnection iConnection, final Object iRequestType) {
}
- public void onClientError(final OClientConnection iConnection) {
+ public void onClientError(final OClientConnection iConnection, final Throwable iThrowable) {
}
}
diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerManager.java b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerManager.java
index 483c76c6359..8794e058c3f 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerManager.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerManager.java
@@ -19,6 +19,7 @@
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.List;
import javax.crypto.SecretKey;
@@ -26,12 +27,15 @@
import com.orientechnologies.common.concur.resource.OSharedResourceExternal;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.Orient;
+import com.orientechnologies.orient.core.db.ODatabase;
+import com.orientechnologies.orient.core.db.ODatabaseComplex;
+import com.orientechnologies.orient.core.db.ODatabaseLifecycleListener;
import com.orientechnologies.orient.core.exception.OConfigurationException;
import com.orientechnologies.orient.core.record.ORecordInternal;
+import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.security.OSecurityManager;
import com.orientechnologies.orient.core.serialization.OBase64Utils;
import com.orientechnologies.orient.core.tx.OTransactionEntry;
-import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryProtocol;
import com.orientechnologies.orient.server.OClientConnection;
import com.orientechnologies.orient.server.OServer;
import com.orientechnologies.orient.server.config.OServerHandlerConfiguration;
@@ -57,19 +61,20 @@
* @see ODistributedServerDiscoveryListener, ODistributedServerDiscoverySignaler
*
*/
-public class ODistributedServerManager extends OServerHandlerAbstract {
+public class ODistributedServerManager extends OServerHandlerAbstract {
protected OServer server;
protected String name;
+ protected String id;
protected SecretKey securityKey;
protected String securityAlgorithm;
protected InetAddress networkMulticastAddress;
protected int networkMulticastPort;
- protected int networkMulticastHeartbeat; // IN MS
- protected int networkTimeoutLeader; // IN MS
- protected int networkTimeoutNode; // IN MS
- private int networkHeartbeatDelay; // IN MS
- protected int serverUpdateDelay; // IN MS
+ protected int networkMulticastHeartbeat; // IN
+ protected int networkTimeoutLeader; // IN
+ protected int networkTimeoutNode; // IN
+ private int networkHeartbeatDelay; // IN
+ protected int serverUpdateDelay; // IN
protected int serverOutSynchMaxBuffers;
private ODistributedServerDiscoverySignaler discoverySignaler;
@@ -78,7 +83,7 @@ public class ODistributedServerManager extends OServerHandlerAbstract {
private ODistributedServerRecordHook trigger;
private final OSharedResourceExternal lock = new OSharedResourceExternal();
- private final HashMap<String, ODistributedServerNode> nodes = new HashMap<String, ODistributedServerNode>(); ;
+ private final HashMap<String, ODistributedServerNode> nodes = new LinkedHashMap<String, ODistributedServerNode>(); ;
static final String CHECKSUM = "ChEcKsUm1976";
@@ -88,8 +93,11 @@ public class ODistributedServerManager extends OServerHandlerAbstract {
private OServerNetworkListener distributedNetworkListener;
private ONetworkProtocolDistributed leaderConnection;
public long lastHeartBeat;
+ private ODocument clusterConfiguration;
public void startup() {
+ trigger = new ODistributedServerRecordHook(this);
+
// LAUNCH THE SIGNAL AND WAIT FOR A CONNECTION
discoverySignaler = new ODistributedServerDiscoverySignaler(this, distributedNetworkListener);
}
@@ -195,6 +203,9 @@ else if (leaderConnection != null)
// STOP THE CHECK OF HEART-BEAT
leaderCheckerTask.cancel();
+ if (clusterConfiguration == null)
+ clusterConfiguration = createDatabaseConfiguration();
+
// NO NODE HAS JOINED: BECAME THE LEADER AND LISTEN FOR OTHER NODES
discoveryListener = new ODistributedServerDiscoveryListener(this, distributedNetworkListener);
@@ -203,21 +214,8 @@ else if (leaderConnection != null)
}
}
- /**
- * Install the trigger to catch all the events on records
- */
- @Override
- public void onAfterClientRequest(final OClientConnection iConnection, final byte iRequestType) {
- if (iRequestType == OChannelBinaryProtocol.REQUEST_DB_OPEN || iRequestType == OChannelBinaryProtocol.REQUEST_DB_CREATE) {
- trigger = new ODistributedServerRecordHook(this, iConnection);
- iConnection.database.registerHook(trigger);
-
- // TODO: SEND THE CLUSTER CONFIG TO THE CLIENT
- }
- }
-
@Override
- public void onClientError(final OClientConnection iConnection) {
+ public void onClientError(final OClientConnection iConnection, final Throwable iThrowable) {
// handleNodeFailure(node);
}
@@ -295,6 +293,8 @@ else if ("server.outsynch.maxbuffers".equalsIgnoreCase(param.name))
"Can't find a configured network listener with 'distributed' protocol. Can't start distributed node", null,
OConfigurationException.class);
+ id = distributedNetworkListener.getInboundAddr().getHostName() + ":" + distributedNetworkListener.getInboundAddr().getPort();
+
} catch (Exception e) {
throw new OConfigurationException("Can't configure OrientDB Server as Cluster Node", e);
}
@@ -357,10 +357,8 @@ public String getName() {
/**
* Distributed the request to all the configured nodes. Each node has the responsibility to bring the message early (synch-mode)
* or using an asynchronous queue.
- *
- * @param iConnection
*/
- public void distributeRequest(final OClientConnection iConnection, final OTransactionEntry<ORecordInternal<?>> iTransactionEntry) {
+ public void distributeRequest(final OTransactionEntry<ORecordInternal<?>> iTransactionEntry) {
lock.acquireSharedLock();
try {
@@ -382,10 +380,6 @@ public int getNetworkHeartbeatDelay() {
return networkHeartbeatDelay;
}
- private static String getNodeName(final String iServerAddress, final int iServerPort) {
- return iServerAddress + ":" + iServerPort;
- }
-
public long getLastHeartBeat() {
return lastHeartBeat;
}
@@ -393,4 +387,25 @@ public long getLastHeartBeat() {
public void updateHeartBeatTime() {
this.lastHeartBeat = System.currentTimeMillis();
}
+
+ public ODocument getClusterConfiguration() {
+ return clusterConfiguration;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ private static String getNodeName(final String iServerAddress, final int iServerPort) {
+ return iServerAddress + ":" + iServerPort;
+ }
+
+ private ODocument createDatabaseConfiguration() {
+ clusterConfiguration = new ODocument();
+
+ clusterConfiguration.field("servers", new ODocument(getId(), new ODocument("update-delay", getServerUpdateDelay())));
+ clusterConfiguration.field("clusters", new ODocument("*", new ODocument("owner", getId())));
+
+ return clusterConfiguration;
+ }
}
diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNode.java b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNode.java
index 3d82d444c6b..9d608990124 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNode.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNode.java
@@ -22,12 +22,15 @@
import java.util.Map;
import com.orientechnologies.common.log.OLogManager;
+import com.orientechnologies.orient.core.command.OCommandOutputListener;
import com.orientechnologies.orient.core.config.OContextConfiguration;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
+import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
+import com.orientechnologies.orient.core.db.tool.ODatabaseExport;
import com.orientechnologies.orient.core.record.ORecordInternal;
-import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.tx.OTransactionEntry;
import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryClient;
+import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryOutputStream;
import com.orientechnologies.orient.enterprise.channel.distributed.OChannelDistributedProtocol;
/**
@@ -36,18 +39,19 @@
* @author Luca Garulli (l.garulli--at--orientechnologies.com)
*
*/
-public class ODistributedServerNode {
+public class ODistributedServerNode implements OCommandOutputListener {
public enum STATUS {
DISCONNECTED, CONNECTING, CONNECTED, SYNCHRONIZING
}
+ private String id;
public String networkAddress;
public int networkPort;
public Date joinedOn;
private ODistributedServerManager manager;
public OChannelBinaryClient channel;
private OContextConfiguration configuration;
- private STATUS status = STATUS.DISCONNECTED;
+ private volatile STATUS status = STATUS.DISCONNECTED;
private Map<String, Long> storages = new HashMap<String, Long>();
private List<OTransactionEntry<ORecordInternal<?>>> bufferedChanges = new ArrayList<OTransactionEntry<ORecordInternal<?>>>();
@@ -57,6 +61,7 @@ public ODistributedServerNode(final ODistributedServerManager iNode, final Strin
networkPort = iServerPort;
joinedOn = new Date();
configuration = new OContextConfiguration();
+ id = networkAddress + ":" + networkPort;
status = STATUS.CONNECTING;
}
@@ -83,9 +88,13 @@ public void sendRequest(final OTransactionEntry<ORecordInternal<?>> iRequest) th
// BUFFER EXCEEDS THE CONFIGURED LIMIT: REMOVE MYSELF AS NODE
manager.removeNode(this);
bufferedChanges.clear();
- } else
+ } else {
// BUFFERIZE THE REQUEST
bufferedChanges.add(iRequest);
+
+ OLogManager.instance().info(this, "Server node '%s' is temporary disconnected, buffering change %d/%d for the record %s",
+ id, bufferedChanges.size(), manager.serverOutSynchMaxBuffers, iRequest.getRecord().getIdentity());
+ }
}
} else {
final ORecordInternal<?> record = iRequest.getRecord();
@@ -93,42 +102,58 @@ public void sendRequest(final OTransactionEntry<ORecordInternal<?>> iRequest) th
try {
switch (iRequest.status) {
case OTransactionEntry.CREATED:
- channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_CREATE);
- channel.writeInt(0);
- channel.writeShort((short) record.getIdentity().getClusterId());
- channel.writeBytes(record.toStream());
- channel.writeByte(record.getRecordType());
- channel.flush();
-
- channel.readStatus();
+ channel.acquireExclusiveLock();
+ try {
+ channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_CREATE);
+ channel.writeInt(0);
+ channel.writeShort((short) record.getIdentity().getClusterId());
+ channel.writeBytes(record.toStream());
+ channel.writeByte(record.getRecordType());
+ channel.flush();
+
+ channel.readStatus();
+
+ } finally {
+ channel.releaseExclusiveLock();
+ }
break;
case OTransactionEntry.UPDATED:
- channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_UPDATE);
- channel.writeInt(0);
- channel.writeShort((short) record.getIdentity().getClusterId());
- channel.writeLong(record.getIdentity().getClusterPosition());
- channel.writeBytes(record.toStream());
- channel.writeInt(record.getVersion());
- channel.writeByte(record.getRecordType());
- channel.flush();
-
- readStatus();
-
- channel.readInt();
+ channel.acquireExclusiveLock();
+ try {
+ channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_UPDATE);
+ channel.writeInt(0);
+ channel.writeShort((short) record.getIdentity().getClusterId());
+ channel.writeLong(record.getIdentity().getClusterPosition());
+ channel.writeBytes(record.toStream());
+ channel.writeInt(record.getVersion());
+ channel.writeByte(record.getRecordType());
+ channel.flush();
+
+ readStatus();
+
+ channel.readInt();
+ } finally {
+ channel.releaseExclusiveLock();
+ }
break;
case OTransactionEntry.DELETED:
- channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_DELETE);
- channel.writeInt(0);
- channel.writeShort((short) record.getIdentity().getClusterId());
- channel.writeLong(record.getIdentity().getClusterPosition());
- channel.writeInt(record.getVersion());
- channel.flush();
-
- readStatus();
-
- channel.readLong();
+ channel.acquireExclusiveLock();
+ try {
+ channel.writeByte(OChannelDistributedProtocol.REQUEST_RECORD_DELETE);
+ channel.writeInt(0);
+ channel.writeShort((short) record.getIdentity().getClusterId());
+ channel.writeLong(record.getIdentity().getClusterPosition());
+ channel.writeInt(record.getVersion());
+ channel.flush();
+
+ readStatus();
+
+ channel.readLong();
+ } finally {
+ channel.releaseExclusiveLock();
+ }
break;
}
} catch (RuntimeException e) {
@@ -175,15 +200,13 @@ public void setAsTemporaryDisconnected(final int iServerOutSynchMaxBuffers) {
}
public void startSynchronization() {
- final ODocument config = createDatabaseConfiguration();
-
// SEND THE LAST CONFIGURATION TO THE NODE
channel.acquireExclusiveLock();
try {
channel.out.writeByte(OChannelDistributedProtocol.REQUEST_DISTRIBUTED_DB_CONFIG);
channel.out.writeInt(0);
- channel.writeBytes(config.toStream());
+ channel.writeBytes(manager.getClusterConfiguration().toStream());
channel.flush();
readStatus();
@@ -202,9 +225,51 @@ public void startSynchronization() {
@Override
public String toString() {
- final StringBuilder builder = new StringBuilder();
- builder.append(networkAddress).append(":").append(networkPort);
- return builder.toString();
+ return id;
+ }
+
+ public STATUS getStatus() {
+ return status;
+ }
+
+ public void shareDatabase(final ODatabaseRecord<?> iDatabase, final String iRemoteServerName, final String iEngineName,
+ final String iMode) throws IOException {
+ if (status == STATUS.DISCONNECTED)
+ throw new ODistributedSynchronizationException("Can't share database '" + iDatabase.getName() + "' on remote server node '"
+ + iRemoteServerName + "' because is disconnected");
+
+ channel.acquireExclusiveLock();
+
+ try {
+ status = STATUS.SYNCHRONIZING;
+
+ OLogManager.instance().info(this,
+ "Sharing database '" + iDatabase.getName() + "' to remote server " + iRemoteServerName + "...");
+
+ // EXECUTE THE REQUEST ON REMOTE SERVER NODE
+ channel.writeByte(OChannelDistributedProtocol.REQUEST_DISTRIBUTED_DB_SHARE_RECEIVER);
+ channel.writeInt(0);
+ channel.writeString(iDatabase.getName());
+ channel.writeString(iEngineName);
+
+ OLogManager.instance().info(this, "Exporting database '%s' via streaming to remote server node: %s...", iDatabase.getName(),
+ iRemoteServerName);
+
+ // START THE EXPORT GIVING AS OUTPUTSTREAM THE CHANNEL TO STREAM THE EXPORT
+ new ODatabaseExport(iDatabase, new OChannelBinaryOutputStream(channel), this).exportDatabase();
+
+ OLogManager.instance().info(this, "Database exported correctly");
+
+ channel.readStatus();
+
+ status = STATUS.CONNECTED;
+
+ } finally {
+ channel.releaseExclusiveLock();
+ }
+ }
+
+ public void onMessage(String iText) {
}
private void synchronizeDelta() throws IOException {
@@ -212,8 +277,8 @@ private void synchronizeDelta() throws IOException {
if (bufferedChanges.isEmpty())
return;
- OLogManager.instance().info(this, "Started realignment of remote node %s:%d after a reconnection. Found %d updates",
- networkAddress, networkPort, bufferedChanges.size());
+ OLogManager.instance().info(this, "Started realignment of remote node '%s' after a reconnection. Found %d updates", id,
+ bufferedChanges.size());
status = STATUS.SYNCHRONIZING;
@@ -222,6 +287,8 @@ private void synchronizeDelta() throws IOException {
}
bufferedChanges.clear();
+ OLogManager.instance().info(this, "Realignment of remote node '%s' done", id);
+
status = STATUS.CONNECTED;
}
@@ -231,17 +298,4 @@ private void synchronizeDelta() throws IOException {
private int readStatus() throws IOException {
return channel.readStatus();
}
-
- private ODocument createDatabaseConfiguration() {
- final ODocument config = new ODocument();
-
- config.field("servers", new ODocument(manager.getName(), new ODocument("update-delay", manager.getServerUpdateDelay())));
- config.field("clusters", new ODocument("*", new ODocument("owner", manager.getName())));
-
- return config;
- }
-
- public STATUS getStatus() {
- return status;
- }
}
diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNodeChecker.java b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNodeChecker.java
index efdbf3cf902..c669a9f1338 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNodeChecker.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerNodeChecker.java
@@ -45,7 +45,7 @@ public void run() {
// CHECK EVERY SINGLE NODE
for (ODistributedServerNode node : nodeList) {
- if (node.getStatus() != STATUS.DISCONNECTED)
+ if (node.getStatus() == STATUS.CONNECTED)
if (!node.sendHeartBeat(manager.networkTimeoutLeader)) {
manager.handleNodeFailure(node);
}
diff --git a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerRecordHook.java b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerRecordHook.java
index 37bc038dd3c..8ee04caf27a 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerRecordHook.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/handler/distributed/ODistributedServerRecordHook.java
@@ -15,52 +15,75 @@
*/
package com.orientechnologies.orient.server.handler.distributed;
+import com.orientechnologies.common.log.OLogManager;
+import com.orientechnologies.orient.core.Orient;
+import com.orientechnologies.orient.core.db.ODatabase;
+import com.orientechnologies.orient.core.db.ODatabaseComplex;
+import com.orientechnologies.orient.core.db.ODatabaseLifecycleListener;
import com.orientechnologies.orient.core.hook.ORecordHook;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.core.tx.OTransactionEntry;
-import com.orientechnologies.orient.server.OClientConnection;
/**
* Record hook implementation. Catches all the relevant events and propagates to the cluster's slave nodes.
*
* @author Luca Garulli (l.garulli--at--orientechnologies.com)
*/
-public class ODistributedServerRecordHook implements ORecordHook {
+public class ODistributedServerRecordHook implements ORecordHook, ODatabaseLifecycleListener {
private ODistributedServerManager manager;
- private OClientConnection connection;
- public ODistributedServerRecordHook(final ODistributedServerManager iDistributedServerManager, final OClientConnection iConnection) {
+ /**
+ * Auto install itself as lifecycle listener for databases.
+ */
+ public ODistributedServerRecordHook(final ODistributedServerManager iDistributedServerManager) {
manager = iDistributedServerManager;
- connection = iConnection;
+ Orient.instance().addDbLifecycleListener(this);
}
public void onTrigger(final TYPE iType, final ORecord<?> iRecord) {
if (!manager.isDistributedConfiguration())
return;
+ OLogManager.instance().info(
+ this,
+ "Caught change " + iType + " in database '" + iRecord.getDatabase().getName() + "', record: " + iRecord.getIdentity()
+ + ". Distribute the change in all the cluster nodes");
+
switch (iType) {
case AFTER_CREATE:
- manager.distributeRequest(connection, new OTransactionEntry<ORecordInternal<?>>((ORecordInternal<?>) iRecord,
- OTransactionEntry.CREATED, null));
+ manager.distributeRequest(new OTransactionEntry<ORecordInternal<?>>((ORecordInternal<?>) iRecord, OTransactionEntry.CREATED,
+ null));
break;
case AFTER_UPDATE:
- manager.distributeRequest(connection, new OTransactionEntry<ORecordInternal<?>>((ORecordInternal<?>) iRecord,
- OTransactionEntry.UPDATED, null));
+ manager.distributeRequest(new OTransactionEntry<ORecordInternal<?>>((ORecordInternal<?>) iRecord, OTransactionEntry.UPDATED,
+ null));
break;
case AFTER_DELETE:
- manager.distributeRequest(connection, new OTransactionEntry<ORecordInternal<?>>((ORecordInternal<?>) iRecord,
- OTransactionEntry.DELETED, null));
+ manager.distributeRequest(new OTransactionEntry<ORecordInternal<?>>((ORecordInternal<?>) iRecord, OTransactionEntry.DELETED,
+ null));
break;
default:
// NOT DISTRIBUTED REQUEST, JUST RETURN
return;
}
+ }
+
+ /**
+ * Install the itself as trigger to catch all the events against records
+ */
+ public void onOpen(final ODatabase iDatabase) {
+ ((ODatabaseComplex<?>) iDatabase).registerHook(this);
+ }
- System.out.println("\nCatched update to database: " + iType + " record: " + iRecord);
+ /**
+ * Remove itself as trigger to catch all the events against records
+ */
+ public void onClose(final ODatabase iDatabase) {
+ ((ODatabaseComplex<?>) iDatabase).unregisterHook(this);
}
}
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java
index fe7f6dbedaa..33fb84a9357 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/ONetworkProtocolBinary.java
@@ -21,7 +21,6 @@
import java.net.SocketException;
import java.util.Collection;
import java.util.HashSet;
-import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -70,7 +69,7 @@
import com.orientechnologies.orient.server.OServer;
import com.orientechnologies.orient.server.OServerMain;
import com.orientechnologies.orient.server.config.OServerUserConfiguration;
-import com.orientechnologies.orient.server.handler.OServerHandler;
+import com.orientechnologies.orient.server.handler.OServerHandlerHelper;
import com.orientechnologies.orient.server.network.protocol.ONetworkProtocol;
import com.orientechnologies.orient.server.tx.OTransactionOptimisticProxy;
import com.orientechnologies.orient.server.tx.OTransactionRecordProxy;
@@ -120,20 +119,24 @@ protected void execute() throws Exception {
data.lastCommandReceived = System.currentTimeMillis();
- invokeHandlerCallbackOnBeforeClientRequest((byte) requestType);
+ OServerHandlerHelper.invokeHandlerCallbackOnBeforeClientRequest(connection, (byte) requestType);
parseCommand();
- invokeHandlerCallbackOnAfterClientRequest((byte) requestType);
+ OServerHandlerHelper.invokeHandlerCallbackOnAfterClientRequest(connection, (byte) requestType);
} catch (EOFException eof) {
+ OServerHandlerHelper.invokeHandlerCallbackOnClientError(connection, eof);
sendShutdown();
} catch (SocketException e) {
+ OServerHandlerHelper.invokeHandlerCallbackOnClientError(connection, e);
sendShutdown();
} catch (OException e) {
+ OServerHandlerHelper.invokeHandlerCallbackOnClientError(connection, e);
channel.clearInput();
sendError(clientTxId, e);
} catch (Throwable t) {
+ OServerHandlerHelper.invokeHandlerCallbackOnClientError(connection, t);
OLogManager.instance().error(this, "Error on executing request", t);
channel.clearInput();
sendError(clientTxId, t);
@@ -679,7 +682,7 @@ else if (iLinked instanceof Map<?, ?>)
@Override
public void startup() {
- invokeHandlerCallbackOnClientDisconnection();
+ OServerHandlerHelper.invokeHandlerCallbackOnClientConnection(connection);
}
@Override
@@ -687,7 +690,7 @@ public void shutdown() {
sendShutdown();
channel.close();
- invokeHandlerCallbackOnClientDisconnection();
+ OServerHandlerHelper.invokeHandlerCallbackOnClientDisconnection(connection);
OClientConnectionManager.instance().onClientDisconnection(connection.id);
}
@@ -780,46 +783,6 @@ private void writeRecord(final ORecordInternal<?> iRecord) throws IOException {
}
}
- private void invokeHandlerCallbackOnClientConnection() {
- final List<OServerHandler> handlers = OServerMain.server().getHandlers();
- if (handlers != null)
- for (OServerHandler handler : handlers) {
- handler.onClientConnection(connection);
- }
- }
-
- private void invokeHandlerCallbackOnClientDisconnection() {
- final List<OServerHandler> handlers = OServerMain.server().getHandlers();
- if (handlers != null)
- for (OServerHandler handler : handlers) {
- handler.onClientDisconnection(connection);
- }
- }
-
- private void invokeHandlerCallbackOnBeforeClientRequest(final byte iRequestType) {
- final List<OServerHandler> handlers = OServerMain.server().getHandlers();
- if (handlers != null)
- for (OServerHandler handler : handlers) {
- handler.onBeforeClientRequest(connection, iRequestType);
- }
- }
-
- private void invokeHandlerCallbackOnAfterClientRequest(final byte iRequestType) {
- final List<OServerHandler> handlers = OServerMain.server().getHandlers();
- if (handlers != null)
- for (OServerHandler handler : handlers) {
- handler.onAfterClientRequest(connection, iRequestType);
- }
- }
-
- private void invokeHandlerCallbackOnClientError() {
- final List<OServerHandler> handlers = OServerMain.server().getHandlers();
- if (handlers != null)
- for (OServerHandler handler : handlers) {
- handler.onClientError(connection);
- }
- }
-
protected ODatabaseDocumentTx openDatabase(final String dbName, final String iUser, final String iPassword) {
// SEARCH THE DB IN MEMORY FIRST
ODatabaseDocumentTx db = (ODatabaseDocumentTx) OServerMain.server().getMemoryDatabases().get(dbName);
@@ -853,8 +816,6 @@ protected void createDatabase(final ODatabaseDocumentTx iDatabase) {
}
underlyingDatabase = ((ODatabaseRaw) ((ODatabaseComplex<?>) iDatabase.getUnderlying()).getUnderlying());
-
- invokeHandlerCallbackOnClientConnection();
}
protected ODatabaseDocumentTx getDatabaseInstance(final String iDbName, final String iStorageMode) {
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java
index bf8cd470dd1..6bbaa7bfd48 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/distributed/ONetworkProtocolDistributed.java
@@ -20,20 +20,16 @@
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.command.OCommandOutputListener;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
-import com.orientechnologies.orient.core.db.tool.ODatabaseExport;
import com.orientechnologies.orient.core.db.tool.ODatabaseImport;
import com.orientechnologies.orient.core.exception.OConfigurationException;
import com.orientechnologies.orient.core.metadata.security.OUser;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocal;
import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryInputStream;
-import com.orientechnologies.orient.enterprise.channel.binary.OChannelBinaryOutputStream;
import com.orientechnologies.orient.enterprise.channel.distributed.OChannelDistributedProtocol;
import com.orientechnologies.orient.server.OServerMain;
import com.orientechnologies.orient.server.handler.distributed.ODistributedServerManager;
import com.orientechnologies.orient.server.handler.distributed.ODistributedServerNode;
-import com.orientechnologies.orient.server.handler.distributed.ODistributedServerNode.STATUS;
-import com.orientechnologies.orient.server.handler.distributed.ODistributedSynchronizationException;
import com.orientechnologies.orient.server.network.protocol.binary.ONetworkProtocolBinary;
/**
@@ -88,6 +84,7 @@ protected void parseCommand() throws IOException {
final String dbUser = channel.readString();
final String dbPassword = channel.readString();
final String remoteServerName = channel.readString();
+ final String mode = channel.readString();
checkServerAccess("database.share");
@@ -96,34 +93,8 @@ protected void parseCommand() throws IOException {
final String engineName = db.getStorage() instanceof OStorageLocal ? "local" : "memory";
final ODistributedServerNode remoteServerNode = manager.getNode(remoteServerName);
- if (remoteServerNode.getStatus() == STATUS.DISCONNECTED)
- throw new ODistributedSynchronizationException("Can't share database '" + dbName + "' on remote server node '"
- + remoteServerName + "' because is disconnected");
- try {
- remoteServerNode.channel.acquireExclusiveLock();
-
- OLogManager.instance().info(this, "Sharing database '" + dbName + "' to remote server " + remoteServerName + "...");
-
- // EXECUTE THE REQUEST ON REMOTE SERVER NODE
- remoteServerNode.channel.writeByte(OChannelDistributedProtocol.REQUEST_DISTRIBUTED_DB_SHARE_RECEIVER);
- remoteServerNode.channel.writeInt(0);
- remoteServerNode.channel.writeString(dbName);
- remoteServerNode.channel.writeString(engineName);
-
- OLogManager.instance().info(this, "Exporting database '%s' via streaming to remote server node: %s...", dbName,
- remoteServerName);
-
- // START THE EXPORT GIVING AS OUTPUTSTREAM THE CHANNEL TO STREAM THE EXPORT
- new ODatabaseExport(db, new OChannelBinaryOutputStream(remoteServerNode.channel), this).exportDatabase();
-
- OLogManager.instance().info(this, "Database exported correctly");
-
- remoteServerNode.channel.readStatus();
-
- } finally {
- remoteServerNode.channel.releaseExclusiveLock();
- }
+ remoteServerNode.shareDatabase(db, remoteServerName, engineName, mode);
sendOk(0);
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java
index 2eb8225aaba..f2d4a6a2e9c 100644
--- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java
+++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/http/ONetworkProtocolHttpAbstract.java
@@ -44,6 +44,7 @@
import com.orientechnologies.orient.server.OClientConnectionManager;
import com.orientechnologies.orient.server.OServer;
import com.orientechnologies.orient.server.config.OServerConfiguration;
+import com.orientechnologies.orient.server.handler.OServerHandlerHelper;
import com.orientechnologies.orient.server.network.protocol.ONetworkProtocol;
import com.orientechnologies.orient.server.network.protocol.http.command.OServerCommand;
@@ -120,10 +121,15 @@ public void service() throws ONetworkProtocolException, IOException {
if (cmd != null)
try {
+ OServerHandlerHelper.invokeHandlerCallbackOnBeforeClientRequest(connection, cmd);
+
if (cmd.beforeExecute(request)) {
// EXECUTE THE COMMAND
cmd.execute(request);
}
+
+ OServerHandlerHelper.invokeHandlerCallbackOnAfterClientRequest(connection, cmd);
+
} catch (Exception e) {
handleError(e);
}
|
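A recurring pattern in the OrientDB diff above is wrapping each multi-write protocol request in acquireExclusiveLock()/releaseExclusiveLock() with a try/finally, so concurrent senders cannot interleave bytes on the shared binary channel and the lock is released even when an I/O error is thrown mid-request. The sketch below only illustrates that shape; the BinaryChannel class, its method names, and the request code are assumptions for illustration, not the actual OrientDB API.

import java.io.IOException;
import java.util.concurrent.locks.ReentrantLock;

// Hypothetical channel: one exclusive lock guards the socket so a full
// request/response exchange is never interleaved with another thread's.
class BinaryChannel {
    private final ReentrantLock lock = new ReentrantLock();

    void acquireExclusiveLock() { lock.lock(); }
    void releaseExclusiveLock() { lock.unlock(); }

    void writeByte(byte b) throws IOException { /* write to the socket stream */ }
    void writeInt(int i) throws IOException { /* write to the socket stream */ }
    void flush() throws IOException { /* flush the socket buffer */ }
    int readStatus() throws IOException { return 0; /* read the response code */ }
}

class RecordSender {
    // One protocol request = one locked section, released on success or failure.
    int sendDelete(BinaryChannel channel) throws IOException {
        channel.acquireExclusiveLock();
        try {
            channel.writeByte((byte) 0x2E); // hypothetical REQUEST_RECORD_DELETE code
            channel.writeInt(0);            // session id placeholder
            channel.flush();
            return channel.readStatus();
        } finally {
            channel.releaseExclusiveLock(); // runs even if a write or read throws
        }
    }
}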
84225b56288b90fe708cf37c909cec8be150faae
|
intellij-community
|
SVN: configure branches NPE
|
c
|
https://github.com/JetBrains/intellij-community
|
diff --git a/plugins/svn4idea/src/org/jetbrains/idea/svn/actions/ConfigureBranchesAction.java b/plugins/svn4idea/src/org/jetbrains/idea/svn/actions/ConfigureBranchesAction.java
index 9a0e9a5d9949c..dbb0dfe555c81 100644
--- a/plugins/svn4idea/src/org/jetbrains/idea/svn/actions/ConfigureBranchesAction.java
+++ b/plugins/svn4idea/src/org/jetbrains/idea/svn/actions/ConfigureBranchesAction.java
@@ -31,7 +31,8 @@ public void update(final AnActionEvent e) {
final ChangeList[] cls = e.getData(VcsDataKeys.CHANGE_LISTS);
presentation.setEnabled((cls != null) && (cls.length > 0) &&
- (SvnVcs.getInstance(project).getName().equals(((CommittedChangeList) cls[0]).getVcs().getName())));
+ (SvnVcs.getInstance(project).getName().equals(((CommittedChangeList) cls[0]).getVcs().getName())) &&
+ (((SvnChangeList) cls[0]).getVcsRoot() != null));
}
public void actionPerformed(final AnActionEvent e) {
diff --git a/plugins/svn4idea/src/org/jetbrains/idea/svn/history/SvnChangeList.java b/plugins/svn4idea/src/org/jetbrains/idea/svn/history/SvnChangeList.java
index b4b8f679fa82c..3d53cb50a6f5d 100644
--- a/plugins/svn4idea/src/org/jetbrains/idea/svn/history/SvnChangeList.java
+++ b/plugins/svn4idea/src/org/jetbrains/idea/svn/history/SvnChangeList.java
@@ -65,6 +65,7 @@ public class SvnChangeList implements CommittedChangeList {
private Set<String> myAddedPaths = new HashSet<String>();
private Set<String> myDeletedPaths = new HashSet<String>();
private List<Change> myChanges;
+ private final String myCommonPathRoot;
private SVNURL myBranchUrl;
private VirtualFile myVcsRoot;
@@ -87,10 +88,14 @@ public SvnChangeList(SvnVcs vcs, @NotNull final SvnRepositoryLocation location,
myRepositoryRoot = repositoryRoot.endsWith("/") ? repositoryRoot.substring(0, repositoryRoot.length() - 1) : repositoryRoot;
+ final CommonPathSearcher commonPathSearcher = new CommonPathSearcher();
+
for(Object o: logEntry.getChangedPaths().values()) {
final SVNLogEntryPath entry = (SVNLogEntryPath) o;
final String path = entry.getPath();
+ commonPathSearcher.next(path);
+
if (entry.getType() == 'A') {
if (entry.getCopyPath() != null) {
myCopiedAddedPaths.put(path, entry.getCopyPath());
@@ -105,6 +110,8 @@ else if (entry.getType() == 'D') {
}
}
+ myCommonPathRoot = commonPathSearcher.getCommon();
+
updateCachedInfo();
}
@@ -112,6 +119,17 @@ public SvnChangeList(SvnVcs vcs, @NotNull SvnRepositoryLocation location, DataIn
myVcs = vcs;
myLocation = location;
readFromStream(stream, supportsCopyFromInfo);
+ final CommonPathSearcher commonPathSearcher = new CommonPathSearcher();
+ for (String path : myAddedPaths) {
+ commonPathSearcher.next(path);
+ }
+ for (String path : myDeletedPaths) {
+ commonPathSearcher.next(path);
+ }
+ for (String path : myChangedPaths) {
+ commonPathSearcher.next(path);
+ }
+ myCommonPathRoot = commonPathSearcher.getCommon();
updateCachedInfo();
}
@@ -395,34 +413,40 @@ public VirtualFile getVcsRoot() {
return myVcsRoot;
}
- @Nullable
- private String getAnyPath() {
- if (! myAddedPaths.isEmpty()) {
- return myAddedPaths.iterator().next();
- }
- if (! myDeletedPaths.isEmpty()) {
- return myDeletedPaths.iterator().next();
+ private static class CommonPathSearcher {
+ private String myCommon;
+
+ public void next(final String value) {
+ if (value == null) {
+ return;
+ }
+ if (myCommon == null) {
+ myCommon = value;
+ return;
+ }
+
+ if (value.startsWith(myCommon)) {
+ return;
+ }
+
+ myCommon = SVNPathUtil.getCommonPathAncestor(myCommon, value);
}
- if (! myChangedPaths.isEmpty()) {
- return myChangedPaths.iterator().next();
+
+ public String getCommon() {
+ return myCommon;
}
- return null;
}
private void updateCachedInfo() {
myCachedInfoLoaded = true;
- String anyRelativePath = getAnyPath();
- if (anyRelativePath == null) {
- return;
- }
- final String absolutePath = myRepositoryRoot + (anyRelativePath.startsWith("/") ? anyRelativePath : ("/" + anyRelativePath));
+ final String absolutePath = myRepositoryRoot + (myCommonPathRoot.startsWith("/") ? myCommonPathRoot : ("/" + myCommonPathRoot));
myVcsRoot = myVcs.getSvnFileUrlMapping().getVcRootByUrl(absolutePath);
if (myVcsRoot == null) {
return;
}
-
+
myBranchUrl = getBranchForUrl(myVcsRoot, absolutePath);
}
|
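The SvnChangeList change above replaces "pick any path" with an incremental fold of every changed path into their common ancestor, so the change list can be mapped to a single VCS root. Below is a minimal self-contained sketch of that fold; the commonAncestor() helper is a simplified stand-in for SVNKit's SVNPathUtil.getCommonPathAncestor() used in the diff, and the sample paths are invented.

import java.util.Arrays;
import java.util.List;

class CommonPathSearcherSketch {
    private String common;

    void next(String path) {
        if (path == null) return;
        if (common == null) { common = path; return; }   // first path seeds the ancestor
        if (path.startsWith(common)) return;              // already covered
        common = commonAncestor(common, path);            // shrink to the shared prefix
    }

    String getCommon() { return common; }

    // Longest common leading sequence of '/'-separated segments.
    private static String commonAncestor(String a, String b) {
        String[] as = a.split("/");
        String[] bs = b.split("/");
        StringBuilder out = new StringBuilder();
        for (int i = 0; i < Math.min(as.length, bs.length); i++) {
            if (!as[i].equals(bs[i])) break;
            if (out.length() > 0 || !as[i].isEmpty()) {
                out.append(i == 0 ? "" : "/").append(as[i]);
            }
        }
        return out.toString();
    }

    public static void main(String[] args) {
        CommonPathSearcherSketch searcher = new CommonPathSearcherSketch();
        List<String> paths = Arrays.asList(
                "/branches/b1/src/A.java",
                "/branches/b1/src/B.java",
                "/branches/b1/doc/readme");
        for (String p : paths) searcher.next(p);
        System.out.println(searcher.getCommon()); // -> /branches/b1
    }
}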
02ffd6ae37a11cb0acc920bada87c994559d396b
|
drools
|
BZ-1044973 - Guided rule editor does not let the user set objects as parameters for method calls that have an object's super type as a parameter
|
c
|
https://github.com/kiegroup/drools
|
diff --git a/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/ActionCallMethodBuilder.java b/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/ActionCallMethodBuilder.java
index 8a5938ec0a4..08156a0452e 100644
--- a/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/ActionCallMethodBuilder.java
+++ b/drools-workbench-models/drools-workbench-models-commons/src/main/java/org/drools/workbench/models/commons/backend/rule/ActionCallMethodBuilder.java
@@ -116,16 +116,27 @@ private String getDataType( String param,
}
private MethodInfo getMethodInfo() {
- String variableType = boundParams.get( variable );
- if ( variableType != null ) {
- List<MethodInfo> methods = getMethodInfosForType( model,
- dmo,
- variableType );
- if ( methods != null ) {
- for ( MethodInfo method : methods ) {
- if ( method.getName().equals( methodName ) ) {
- return method;
+ String variableType = boundParams.get(variable);
+ if (variableType != null) {
+ List<MethodInfo> methods = getMethodInfosForType(model,
+ dmo,
+ variableType);
+ if (methods != null) {
+
+ ArrayList<MethodInfo> methodInfos = getMethodInfos(methodName, methods);
+
+ if (methodInfos.size() > 1) {
+ // Now if there were more than one method with the same name
+ // we need to start figuring out what is the correct one.
+ for (MethodInfo methodInfo : methodInfos) {
+ if (compareParameters(methodInfo.getParams())) {
+ return methodInfo;
+ }
}
+ } else if (!methodInfos.isEmpty()){
+ // Not perfect, but works on most cases.
+ // There is no check if the parameter types match.
+ return methodInfos.get(0);
}
}
}
@@ -133,4 +144,27 @@ private MethodInfo getMethodInfo() {
return null;
}
+ private ArrayList<MethodInfo> getMethodInfos(String methodName, List<MethodInfo> methods) {
+ ArrayList<MethodInfo> result = new ArrayList<MethodInfo>();
+ for (MethodInfo method : methods) {
+ if (method.getName().equals(methodName)) {
+ result.add(method);
+ }
+ }
+ return result;
+ }
+
+ private boolean compareParameters(List<String> methodParams) {
+ if (methodParams.size() != parameters.length) {
+ return false;
+ } else {
+ for (int index = 0; index < methodParams.size(); index++) {
+ if (!methodParams.get(index).equals(boundParams.get(parameters[index]))) {
+ return false;
+ }
+ }
+ return true;
+ }
+ }
+
}
diff --git a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java
index 9bf47778577..a1de643d986 100644
--- a/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java
+++ b/drools-workbench-models/drools-workbench-models-commons/src/test/java/org/drools/workbench/models/commons/backend/rule/RuleModelDRLPersistenceUnmarshallingTest.java
@@ -3273,7 +3273,6 @@ public void testDSLExpansionRHS() {
}
@Test
- @Ignore(" Still does not know the difference between indexOf(int) and indexOf(String) ")
public void testFunctionCalls() {
String drl =
"package org.mortgages\n" +
@@ -3289,21 +3288,19 @@ public void testFunctionCalls() {
+ "end\n";
Map<String, List<MethodInfo>> methodInformation = new HashMap<String, List<MethodInfo>>();
- List<MethodInfo> mapMethodInformation1 = new ArrayList<MethodInfo>();
- mapMethodInformation1.add( new MethodInfo( "indexOf",
+ List<MethodInfo> mapMethodInformation = new ArrayList<MethodInfo>();
+ mapMethodInformation.add( new MethodInfo( "indexOf",
Arrays.asList( new String[]{ "String" } ),
"int",
null,
"String" ) );
- List<MethodInfo> mapMethodInformation2 = new ArrayList<MethodInfo>();
- mapMethodInformation2.add( new MethodInfo( "indexOf",
+ mapMethodInformation.add( new MethodInfo( "indexOf",
Arrays.asList( new String[]{ "Integer" } ),
"int",
null,
"String" ) );
- methodInformation.put( "java.lang.String", mapMethodInformation2 );
- methodInformation.put( "java.lang.String", mapMethodInformation1 );
+ methodInformation.put( "java.lang.String", mapMethodInformation );
when( dmo.getProjectMethodInformation() ).thenReturn( methodInformation );
|
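The Drools fix above resolves overloaded methods (e.g. indexOf(int) vs indexOf(String)) by first collecting every MethodInfo with the requested name and then, when more than one remains, comparing each candidate's declared parameter types against the types bound to the actual arguments. The sketch below shows only that idea; the MethodInfo record, boundParams map, and class names are simplified assumptions rather than the real Drools workbench model classes.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

record MethodInfo(String name, List<String> params) {}

class OverloadResolver {

    MethodInfo resolve(String methodName,
                       List<MethodInfo> candidates,
                       String[] arguments,
                       Map<String, String> boundParams) {
        // Step 1: keep only methods with the requested name.
        List<MethodInfo> byName = new ArrayList<>();
        for (MethodInfo m : candidates) {
            if (m.name().equals(methodName)) {
                byName.add(m);
            }
        }
        if (byName.size() == 1) {
            return byName.get(0); // no ambiguity, take it as-is
        }
        // Step 2: disambiguate by matching declared parameter types
        // against the types of the bound argument variables.
        for (MethodInfo m : byName) {
            if (parametersMatch(m.params(), arguments, boundParams)) {
                return m;
            }
        }
        return null;
    }

    private boolean parametersMatch(List<String> declared,
                                    String[] arguments,
                                    Map<String, String> boundParams) {
        if (declared.size() != arguments.length) {
            return false;
        }
        for (int i = 0; i < declared.size(); i++) {
            // each argument is a bound variable name; look up its declared type
            if (!declared.get(i).equals(boundParams.get(arguments[i]))) {
                return false;
            }
        }
        return true;
    }
}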
f0bb45ae2869920cfa29a46ea7704b1a6c69ab37
|
spring-framework
|
included qualifier value in debug log for each transaction (SPR-6811)
|
p
|
https://github.com/spring-projects/spring-framework
|
diff --git a/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/AbstractFallbackTransactionAttributeSource.java b/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/AbstractFallbackTransactionAttributeSource.java
index 86c6b5281244..01dbf705e6d7 100644
--- a/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/AbstractFallbackTransactionAttributeSource.java
+++ b/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/AbstractFallbackTransactionAttributeSource.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2008 the original author or authors.
+ * Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -103,7 +103,7 @@ public TransactionAttribute getTransactionAttribute(Method method, Class targetC
}
else {
if (logger.isDebugEnabled()) {
- logger.debug("Adding transactional method [" + method.getName() + "] with attribute [" + txAtt + "]");
+ logger.debug("Adding transactional method '" + method.getName() + "' with attribute: " + txAtt);
}
this.attributeCache.put(cacheKey, txAtt);
}
diff --git a/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/DefaultTransactionAttribute.java b/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/DefaultTransactionAttribute.java
index d9a9cf515e7b..3bae2f663d05 100644
--- a/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/DefaultTransactionAttribute.java
+++ b/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/DefaultTransactionAttribute.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2009 the original author or authors.
+ * Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -94,4 +94,17 @@ public boolean rollbackOn(Throwable ex) {
return (ex instanceof RuntimeException || ex instanceof Error);
}
+
+ /**
+ * Return an identifying description for this transaction attribute.
+ * <p>Available to subclasses, for inclusion in their <code>toString()</code> result.
+ */
+ protected final StringBuilder getAttributeDescription() {
+ StringBuilder result = getDefinitionDescription();
+ if (this.qualifier != null) {
+ result.append("; '").append(this.qualifier).append("'");
+ }
+ return result;
+ }
+
}
diff --git a/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/RuleBasedTransactionAttribute.java b/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/RuleBasedTransactionAttribute.java
index 87a5da020413..d67368843f36 100644
--- a/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/RuleBasedTransactionAttribute.java
+++ b/org.springframework.transaction/src/main/java/org/springframework/transaction/interceptor/RuleBasedTransactionAttribute.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2002-2008 the original author or authors.
+ * Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -30,7 +30,7 @@
* both positive and negative. If no rules are relevant to the exception, it
* behaves like DefaultTransactionAttribute (rolling back on runtime exceptions).
*
- * <p>TransactionAttributeEditor creates objects of this class.
+ * <p>{@link TransactionAttributeEditor} creates objects of this class.
*
* @author Rod Johnson
* @author Juergen Hoeller
@@ -159,7 +159,7 @@ public boolean rollbackOn(Throwable ex) {
@Override
public String toString() {
- StringBuilder result = getDefinitionDescription();
+ StringBuilder result = getAttributeDescription();
if (this.rollbackRules != null) {
for (RollbackRuleAttribute rule : this.rollbackRules) {
String sign = (rule instanceof NoRollbackRuleAttribute ? PREFIX_COMMIT_RULE : PREFIX_ROLLBACK_RULE);
diff --git a/org.springframework.transaction/src/test/resources/log4j.xml b/org.springframework.transaction/src/test/resources/log4j.xml
index 767b96d6206d..e684ff2b7e90 100644
--- a/org.springframework.transaction/src/test/resources/log4j.xml
+++ b/org.springframework.transaction/src/test/resources/log4j.xml
@@ -15,7 +15,7 @@
<level value="warn" />
</logger>
- <logger name="org.springframework.binding">
+ <logger name="org.springframework.transaction">
<level value="debug" />
</logger>
|
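The Spring change above factors the transaction attribute's debug description into a shared helper that appends the qualifier only when one is set. A minimal sketch of that pattern follows; the class and field names are illustrative, not the actual Spring classes.

// Illustrative only: base description plus optional qualifier, mirroring
// getAttributeDescription() in the diff above.
class TransactionAttributeSketch {
    private final String definition;
    private final String qualifier; // may be null when no qualifier is set

    TransactionAttributeSketch(String definition, String qualifier) {
        this.definition = definition;
        this.qualifier = qualifier;
    }

    StringBuilder getAttributeDescription() {
        StringBuilder result = new StringBuilder(definition);
        if (qualifier != null) {
            result.append("; '").append(qualifier).append("'");
        }
        return result;
    }

    @Override
    public String toString() {
        return getAttributeDescription().toString();
    }

    public static void main(String[] args) {
        System.out.println(new TransactionAttributeSketch(
                "PROPAGATION_REQUIRED,ISOLATION_DEFAULT", "ledgerTxManager"));
        // -> PROPAGATION_REQUIRED,ISOLATION_DEFAULT; 'ledgerTxManager'
    }
}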